comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
Understandable. I was confused as well initially after your comment. I thought I did a refactoring error. | private HttpClient getHttpClient(HttpClient httpClient) {
if (httpClient == null || getTestMode() == TestMode.PLAYBACK) {
return interceptorManager.getPlaybackClient();
}
return httpClient;
} | if (httpClient == null || getTestMode() == TestMode.PLAYBACK) { | private HttpClient getHttpClient(HttpClient httpClient) {
if (httpClient == null || getTestMode() == TestMode.PLAYBACK) {
return interceptorManager.getPlaybackClient();
}
return httpClient;
} | class SipRoutingIntegrationTestBase extends TestBase {
private static final String CONNECTION_STRING = Configuration.getGlobalConfiguration()
.get("COMMUNICATION_LIVETEST_DYNAMIC_CONNECTION_STRING", "endpoint=https:
protected static final String SET_TRUNK_ROUTE_NAME = "route99";
protected static final String SET_TRUNK_ROUTE_NUMBER_PATTERN = "99.*";
protected static final SipTrunkRoute SET_TRUNK_ROUTE =
new SipTrunkRoute(SET_TRUNK_ROUTE_NAME, SET_TRUNK_ROUTE_NUMBER_PATTERN);
protected static final String FIRST_FQDN = getUniqueFqdn("first");
protected static final String SECOND_FQDN = getUniqueFqdn("second");
protected static final String THIRD_FQDN = getUniqueFqdn("third");
protected static final String FOURTH_FQDN = getUniqueFqdn("fourth");
protected static final String FIFTH_FQDN = getUniqueFqdn("fifth");
protected static final String SIXTH_FQDN = getUniqueFqdn("sixth");
protected static final String DELETE_FQDN = getUniqueFqdn("delete");
protected static final String SET_TRUNK_FQDN = getUniqueFqdn("set");
protected static final String NOT_EXISTING_FQDN = "not.existing.fqdn";
protected static final int SET_TRUNK_PORT = 4567;
protected static final SipTrunk SET_TRUNK = new SipTrunk(SET_TRUNK_FQDN, SET_TRUNK_PORT);
protected static final int SET_TRUNK_UPDATED_PORT = 7651;
protected static final SipTrunk SET_UPDATED_TRUNK = new SipTrunk(SET_TRUNK_FQDN, SET_TRUNK_UPDATED_PORT);
protected static final String SET_TRUNK_INVALID_FQDN = "_";
protected static final int SET_TRUNK_INVALID_PORT = -1;
protected static final int DELETE_PORT = 5678;
protected static final SipTrunk DELETE_TRUNK = new SipTrunk(DELETE_FQDN, DELETE_PORT);
protected static final List<SipTrunk> EXPECTED_TRUNKS = asList(
new SipTrunk(FIRST_FQDN, 1234),
new SipTrunk(SECOND_FQDN, 2345),
new SipTrunk(THIRD_FQDN, 3456)
);
protected static final List<SipTrunk> UPDATED_TRUNKS = asList(
new SipTrunk(FIRST_FQDN, 9876),
new SipTrunk(FOURTH_FQDN, 2340),
new SipTrunk(FIFTH_FQDN, 3460),
new SipTrunk(SIXTH_FQDN, 4461)
);
protected static final List<SipTrunkRoute> EXPECTED_ROUTES = asList(
new SipTrunkRoute("route0", "0.*").setDescription("desc0"),
new SipTrunkRoute("route1", "1.*").setDescription("desc1"),
new SipTrunkRoute("route2", "2.*").setDescription("desc2")
);
protected static final List<SipTrunkRoute> EXPECTED_ROUTES_WITH_REFERENCED_TRUNK = asList(
new SipTrunkRoute("route0", "0.*").setDescription("desc0"),
new SipTrunkRoute("route1", "1.*").setDescription("desc1"),
new SipTrunkRoute("route2", "2.*").setDescription("desc2")
.setTrunks(asList(SET_TRUNK_FQDN))
);
protected static final List<SipTrunkRoute> UPDATED_ROUTES = asList(
new SipTrunkRoute("route10", "9.*").setDescription("des90"),
new SipTrunkRoute("route0", "8.*").setDescription("desc91"),
new SipTrunkRoute("route21", "7.*").setDescription("desc92"),
new SipTrunkRoute("route24", "4.*").setDescription("desc44")
);
private static final StringJoiner JSON_PROPERTIES_TO_REDACT =
new StringJoiner("\":\"|\"", "\"", "\":\"")
.add("id")
.add("phoneNumber");
private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN =
Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"} | class SipRoutingIntegrationTestBase extends TestBase {
private static final String CONNECTION_STRING = Configuration.getGlobalConfiguration()
.get("COMMUNICATION_LIVETEST_DYNAMIC_CONNECTION_STRING", "endpoint=https:
protected static final String SET_TRUNK_ROUTE_NAME = "route99";
protected static final String SET_TRUNK_ROUTE_NUMBER_PATTERN = "99.*";
protected static final SipTrunkRoute SET_TRUNK_ROUTE =
new SipTrunkRoute(SET_TRUNK_ROUTE_NAME, SET_TRUNK_ROUTE_NUMBER_PATTERN);
protected static final String FIRST_FQDN = getUniqueFqdn("first");
protected static final String SECOND_FQDN = getUniqueFqdn("second");
protected static final String THIRD_FQDN = getUniqueFqdn("third");
protected static final String FOURTH_FQDN = getUniqueFqdn("fourth");
protected static final String FIFTH_FQDN = getUniqueFqdn("fifth");
protected static final String SIXTH_FQDN = getUniqueFqdn("sixth");
protected static final String DELETE_FQDN = getUniqueFqdn("delete");
protected static final String SET_TRUNK_FQDN = getUniqueFqdn("set");
protected static final String NOT_EXISTING_FQDN = "not.existing.fqdn";
protected static final int SET_TRUNK_PORT = 4567;
protected static final SipTrunk SET_TRUNK = new SipTrunk(SET_TRUNK_FQDN, SET_TRUNK_PORT);
protected static final int SET_TRUNK_UPDATED_PORT = 7651;
protected static final SipTrunk SET_UPDATED_TRUNK = new SipTrunk(SET_TRUNK_FQDN, SET_TRUNK_UPDATED_PORT);
protected static final String SET_TRUNK_INVALID_FQDN = "_";
protected static final int SET_TRUNK_INVALID_PORT = -1;
protected static final int DELETE_PORT = 5678;
protected static final SipTrunk DELETE_TRUNK = new SipTrunk(DELETE_FQDN, DELETE_PORT);
protected static final List<SipTrunk> EXPECTED_TRUNKS = asList(
new SipTrunk(FIRST_FQDN, 1234),
new SipTrunk(SECOND_FQDN, 2345),
new SipTrunk(THIRD_FQDN, 3456)
);
protected static final List<SipTrunk> UPDATED_TRUNKS = asList(
new SipTrunk(FIRST_FQDN, 9876),
new SipTrunk(FOURTH_FQDN, 2340),
new SipTrunk(FIFTH_FQDN, 3460),
new SipTrunk(SIXTH_FQDN, 4461)
);
protected static final List<SipTrunkRoute> EXPECTED_ROUTES = asList(
new SipTrunkRoute("route0", "0.*").setDescription("desc0"),
new SipTrunkRoute("route1", "1.*").setDescription("desc1"),
new SipTrunkRoute("route2", "2.*").setDescription("desc2")
);
protected static final List<SipTrunkRoute> EXPECTED_ROUTES_WITH_REFERENCED_TRUNK = asList(
new SipTrunkRoute("route0", "0.*").setDescription("desc0"),
new SipTrunkRoute("route1", "1.*").setDescription("desc1"),
new SipTrunkRoute("route2", "2.*").setDescription("desc2")
.setTrunks(asList(SET_TRUNK_FQDN))
);
protected static final List<SipTrunkRoute> UPDATED_ROUTES = asList(
new SipTrunkRoute("route10", "9.*").setDescription("des90"),
new SipTrunkRoute("route0", "8.*").setDescription("desc91"),
new SipTrunkRoute("route21", "7.*").setDescription("desc92"),
new SipTrunkRoute("route24", "4.*").setDescription("desc44")
);
private static final StringJoiner JSON_PROPERTIES_TO_REDACT =
new StringJoiner("\":\"|\"", "\"", "\":\"")
.add("id")
.add("phoneNumber");
private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN =
Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"} |
Unused? | public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors
.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
byte[] eventBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
final ArrayList<EventData> eventsList = new ArrayList<>();
for (int number = 0; number < options.getCount(); number++) {
final EventData eventData = EventData.create(eventBytes);
eventData.getProperties().put("index", number);
eventsList.add(eventData);
}
this.events = Collections.unmodifiableList(eventsList);
eventDataBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
} | public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
protected final List<EventData> events;
protected byte[] eventDataBytes;
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
}
protected String generateString(int targetLength) {
int leftLimit = 97;
int rightLimit = 122;
Random random = new Random();
String generatedString = random.ints(leftLimit, rightLimit + 1)
.limit(targetLength)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
return generatedString;
}
/**
* Sends the number of messages to {@code partitionId}.
*
* @param client Client used to send message.
* @param partitionId Destination partition id.
* @param totalMessagesToSend Number of messages to send.
*
* @return A Mono that completes when all messages are sent.
*
* @throws RuntimeException if the partition sender could not be created. Or an exception occurred while sending
* the messages.
*/
Mono<Void> sendMessages(EventHubClient client, String partitionId, int totalMessagesToSend) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
int numberOfMessages = totalMessagesToSend;
while (numberOfMessages > 0) {
currentBatch = sender.createBatch();
addEvents(currentBatch, numberOfMessages);
try {
sender.sendSync(currentBatch);
numberOfMessages = numberOfMessages - currentBatch.getSize();
} catch (EventHubException e) {
System.err.println("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
},
sender -> Mono.fromCompletionStage(sender.close()));
}
Mono<Void> preLoadEvents(EventHubClient client, String partitionId, int totalMessagesToSend) {
final AtomicLong eventsToSend = new AtomicLong(totalMessagesToSend);
final AtomicLong totalEvents = new AtomicLong(0);
Mono<Void> partitionMono;
if (CoreUtils.isNullOrEmpty(partitionId)) {
partitionMono = Mono.fromFuture(client.getRuntimeInformation())
.flatMap(eventHubRuntimeInformation -> {
String[] partitionIds = eventHubRuntimeInformation.getPartitionIds();
return Flux.fromArray(partitionIds)
.map(partId -> Mono.fromFuture(client.getPartitionRuntimeInformation(partId))
.map(partitionRuntimeInformation -> {
totalEvents.addAndGet(partitionRuntimeInformation.getLastEnqueuedSequenceNumber() - partitionRuntimeInformation.getBeginSequenceNumber());
return Mono.empty();
})).then();
}).then();
} else {
partitionMono = Mono.fromFuture(client.getPartitionRuntimeInformation(partitionId))
.map(partitionProperties -> {
totalEvents.addAndGet(partitionProperties.getLastEnqueuedSequenceNumber() - partitionProperties.getBeginSequenceNumber());
return Mono.empty();
}).then();
}
if (!CoreUtils.isNullOrEmpty(partitionId)) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return partitionMono.then(Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
currentBatch = sender.createBatch();
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
sender.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
},
sender -> Mono.fromCompletionStage(sender.close())));
} else {
return partitionMono.then(Mono.defer(() -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
try {
currentBatch = client.createBatch();
} catch (EventHubException e) {
throw new RuntimeException("Error creating Batch", e);
}
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
client.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
}));
}
}
/**
* Adds the number of messages to the batch. The size of the message is set using {@link
* PerfStressOptions
*
* @param batch The batch to add messages to.
* @param numberOfMessages Number of messages to add.
*/
void addEvents(EventDataBatch batch, int numberOfMessages) {
for (int i = 0; i < numberOfMessages; i++) {
final int index = numberOfMessages % events.size();
final EventData event = events.get(index);
try {
if (!batch.tryAdd(event)) {
System.out.printf("Only added %s of %s events.%n", i, numberOfMessages);
break;
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
}
}
protected EventData createEvent() {
EventData eventData = EventData.create(eventDataBytes);
return eventData;
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
}
} | |
Why not Mono.fromRunnable? | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | return Mono.fromCallable(() -> { | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
protected final List<EventData> events;
protected byte[] eventDataBytes;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors
.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
byte[] eventBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
final ArrayList<EventData> eventsList = new ArrayList<>();
for (int number = 0; number < options.getCount(); number++) {
final EventData eventData = EventData.create(eventBytes);
eventData.getProperties().put("index", number);
eventsList.add(eventData);
}
this.events = Collections.unmodifiableList(eventsList);
eventDataBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
protected String generateString(int targetLength) {
int leftLimit = 97;
int rightLimit = 122;
Random random = new Random();
String generatedString = random.ints(leftLimit, rightLimit + 1)
.limit(targetLength)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
return generatedString;
}
/**
* Sends the number of messages to {@code partitionId}.
*
* @param client Client used to send message.
* @param partitionId Destination partition id.
* @param totalMessagesToSend Number of messages to send.
*
* @return A Mono that completes when all messages are sent.
*
* @throws RuntimeException if the partition sender could not be created. Or an exception occurred while sending
* the messages.
*/
Mono<Void> sendMessages(EventHubClient client, String partitionId, int totalMessagesToSend) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
int numberOfMessages = totalMessagesToSend;
while (numberOfMessages > 0) {
currentBatch = sender.createBatch();
addEvents(currentBatch, numberOfMessages);
try {
sender.sendSync(currentBatch);
numberOfMessages = numberOfMessages - currentBatch.getSize();
} catch (EventHubException e) {
System.err.println("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
},
sender -> Mono.fromCompletionStage(sender.close()));
}
Mono<Void> preLoadEvents(EventHubClient client, String partitionId, int totalMessagesToSend) {
final AtomicLong eventsToSend = new AtomicLong(totalMessagesToSend);
final AtomicLong totalEvents = new AtomicLong(0);
Mono<Void> partitionMono;
if (CoreUtils.isNullOrEmpty(partitionId)) {
partitionMono = Mono.fromFuture(client.getRuntimeInformation())
.flatMap(eventHubRuntimeInformation -> {
String[] partitionIds = eventHubRuntimeInformation.getPartitionIds();
return Flux.fromArray(partitionIds)
.map(partId -> Mono.fromFuture(client.getPartitionRuntimeInformation(partId))
.map(partitionRuntimeInformation -> {
totalEvents.addAndGet(partitionRuntimeInformation.getLastEnqueuedSequenceNumber() - partitionRuntimeInformation.getBeginSequenceNumber());
return Mono.empty();
})).then();
}).then();
} else {
partitionMono = Mono.fromFuture(client.getPartitionRuntimeInformation(partitionId))
.map(partitionProperties -> {
totalEvents.addAndGet(partitionProperties.getLastEnqueuedSequenceNumber() - partitionProperties.getBeginSequenceNumber());
return Mono.empty();
}).then();
}
if (!CoreUtils.isNullOrEmpty(partitionId)) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return partitionMono.then(Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
currentBatch = sender.createBatch();
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
sender.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
},
sender -> Mono.fromCompletionStage(sender.close())));
} else {
return partitionMono.then(Mono.defer(() -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
try {
currentBatch = client.createBatch();
} catch (EventHubException e) {
throw new RuntimeException("Error creating Batch", e);
}
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
client.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
}));
}
}
/**
* Adds the number of messages to the batch. The size of the message is set using {@link
* PerfStressOptions
*
* @param batch The batch to add messages to.
* @param numberOfMessages Number of messages to add.
*/
void addEvents(EventDataBatch batch, int numberOfMessages) {
for (int i = 0; i < numberOfMessages; i++) {
final int index = numberOfMessages % events.size();
final EventData event = events.get(index);
try {
if (!batch.tryAdd(event)) {
System.out.printf("Only added %s of %s events.%n", i, numberOfMessages);
break;
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
}
}
protected EventData createEvent() {
EventData eventData = EventData.create(eventDataBytes);
return eventData;
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
} |
cleaned up. | public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors
.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
byte[] eventBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
final ArrayList<EventData> eventsList = new ArrayList<>();
for (int number = 0; number < options.getCount(); number++) {
final EventData eventData = EventData.create(eventBytes);
eventData.getProperties().put("index", number);
eventsList.add(eventData);
}
this.events = Collections.unmodifiableList(eventsList);
eventDataBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
} | public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
protected final List<EventData> events;
protected byte[] eventDataBytes;
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
}
protected String generateString(int targetLength) {
int leftLimit = 97;
int rightLimit = 122;
Random random = new Random();
String generatedString = random.ints(leftLimit, rightLimit + 1)
.limit(targetLength)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
return generatedString;
}
/**
* Sends the number of messages to {@code partitionId}.
*
* @param client Client used to send message.
* @param partitionId Destination partition id.
* @param totalMessagesToSend Number of messages to send.
*
* @return A Mono that completes when all messages are sent.
*
* @throws RuntimeException if the partition sender could not be created. Or an exception occurred while sending
* the messages.
*/
Mono<Void> sendMessages(EventHubClient client, String partitionId, int totalMessagesToSend) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
int numberOfMessages = totalMessagesToSend;
while (numberOfMessages > 0) {
currentBatch = sender.createBatch();
addEvents(currentBatch, numberOfMessages);
try {
sender.sendSync(currentBatch);
numberOfMessages = numberOfMessages - currentBatch.getSize();
} catch (EventHubException e) {
System.err.println("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
},
sender -> Mono.fromCompletionStage(sender.close()));
}
Mono<Void> preLoadEvents(EventHubClient client, String partitionId, int totalMessagesToSend) {
final AtomicLong eventsToSend = new AtomicLong(totalMessagesToSend);
final AtomicLong totalEvents = new AtomicLong(0);
Mono<Void> partitionMono;
if (CoreUtils.isNullOrEmpty(partitionId)) {
partitionMono = Mono.fromFuture(client.getRuntimeInformation())
.flatMap(eventHubRuntimeInformation -> {
String[] partitionIds = eventHubRuntimeInformation.getPartitionIds();
return Flux.fromArray(partitionIds)
.map(partId -> Mono.fromFuture(client.getPartitionRuntimeInformation(partId))
.map(partitionRuntimeInformation -> {
totalEvents.addAndGet(partitionRuntimeInformation.getLastEnqueuedSequenceNumber() - partitionRuntimeInformation.getBeginSequenceNumber());
return Mono.empty();
})).then();
}).then();
} else {
partitionMono = Mono.fromFuture(client.getPartitionRuntimeInformation(partitionId))
.map(partitionProperties -> {
totalEvents.addAndGet(partitionProperties.getLastEnqueuedSequenceNumber() - partitionProperties.getBeginSequenceNumber());
return Mono.empty();
}).then();
}
if (!CoreUtils.isNullOrEmpty(partitionId)) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return partitionMono.then(Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
currentBatch = sender.createBatch();
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
sender.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
},
sender -> Mono.fromCompletionStage(sender.close())));
} else {
return partitionMono.then(Mono.defer(() -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
try {
currentBatch = client.createBatch();
} catch (EventHubException e) {
throw new RuntimeException("Error creating Batch", e);
}
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
client.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
}));
}
}
/**
* Adds the number of messages to the batch. The size of the message is set using {@link
* PerfStressOptions
*
* @param batch The batch to add messages to.
* @param numberOfMessages Number of messages to add.
*/
void addEvents(EventDataBatch batch, int numberOfMessages) {
for (int i = 0; i < numberOfMessages; i++) {
final int index = numberOfMessages % events.size();
final EventData event = events.get(index);
try {
if (!batch.tryAdd(event)) {
System.out.printf("Only added %s of %s events.%n", i, numberOfMessages);
break;
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
}
}
protected EventData createEvent() {
EventData eventData = EventData.create(eventDataBytes);
return eventData;
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
}
} | |
Exception handling is enforced in Runnable, with Callable it gets bubbled up I believe. | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | return Mono.fromCallable(() -> { | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
protected final List<EventData> events;
protected byte[] eventDataBytes;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors
.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
byte[] eventBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
final ArrayList<EventData> eventsList = new ArrayList<>();
for (int number = 0; number < options.getCount(); number++) {
final EventData eventData = EventData.create(eventBytes);
eventData.getProperties().put("index", number);
eventsList.add(eventData);
}
this.events = Collections.unmodifiableList(eventsList);
eventDataBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
protected String generateString(int targetLength) {
int leftLimit = 97;
int rightLimit = 122;
Random random = new Random();
String generatedString = random.ints(leftLimit, rightLimit + 1)
.limit(targetLength)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
return generatedString;
}
/**
* Sends the number of messages to {@code partitionId}.
*
* @param client Client used to send message.
* @param partitionId Destination partition id.
* @param totalMessagesToSend Number of messages to send.
*
* @return A Mono that completes when all messages are sent.
*
* @throws RuntimeException if the partition sender could not be created. Or an exception occurred while sending
* the messages.
*/
Mono<Void> sendMessages(EventHubClient client, String partitionId, int totalMessagesToSend) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
int numberOfMessages = totalMessagesToSend;
while (numberOfMessages > 0) {
currentBatch = sender.createBatch();
addEvents(currentBatch, numberOfMessages);
try {
sender.sendSync(currentBatch);
numberOfMessages = numberOfMessages - currentBatch.getSize();
} catch (EventHubException e) {
System.err.println("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
},
sender -> Mono.fromCompletionStage(sender.close()));
}
Mono<Void> preLoadEvents(EventHubClient client, String partitionId, int totalMessagesToSend) {
final AtomicLong eventsToSend = new AtomicLong(totalMessagesToSend);
final AtomicLong totalEvents = new AtomicLong(0);
Mono<Void> partitionMono;
if (CoreUtils.isNullOrEmpty(partitionId)) {
partitionMono = Mono.fromFuture(client.getRuntimeInformation())
.flatMap(eventHubRuntimeInformation -> {
String[] partitionIds = eventHubRuntimeInformation.getPartitionIds();
return Flux.fromArray(partitionIds)
.map(partId -> Mono.fromFuture(client.getPartitionRuntimeInformation(partId))
.map(partitionRuntimeInformation -> {
totalEvents.addAndGet(partitionRuntimeInformation.getLastEnqueuedSequenceNumber() - partitionRuntimeInformation.getBeginSequenceNumber());
return Mono.empty();
})).then();
}).then();
} else {
partitionMono = Mono.fromFuture(client.getPartitionRuntimeInformation(partitionId))
.map(partitionProperties -> {
totalEvents.addAndGet(partitionProperties.getLastEnqueuedSequenceNumber() - partitionProperties.getBeginSequenceNumber());
return Mono.empty();
}).then();
}
if (!CoreUtils.isNullOrEmpty(partitionId)) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return partitionMono.then(Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
currentBatch = sender.createBatch();
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
sender.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
},
sender -> Mono.fromCompletionStage(sender.close())));
} else {
return partitionMono.then(Mono.defer(() -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
try {
currentBatch = client.createBatch();
} catch (EventHubException e) {
throw new RuntimeException("Error creating Batch", e);
}
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
client.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
}));
}
}
/**
* Adds the number of messages to the batch. The size of the message is set using {@link
* PerfStressOptions
*
* @param batch The batch to add messages to.
* @param numberOfMessages Number of messages to add.
*/
void addEvents(EventDataBatch batch, int numberOfMessages) {
for (int i = 0; i < numberOfMessages; i++) {
final int index = numberOfMessages % events.size();
final EventData event = events.get(index);
try {
if (!batch.tryAdd(event)) {
System.out.printf("Only added %s of %s events.%n", i, numberOfMessages);
break;
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
}
}
protected EventData createEvent() {
EventData eventData = EventData.create(eventDataBytes);
return eventData;
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
} |
thanks! i learnt that today. | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | return Mono.fromCallable(() -> { | public Mono<Void> cleanupAsync() {
return Mono.fromCallable(() -> {
eventHubClient.close();
executor.shutdownNow();
return 1;
}).then();
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
protected final List<EventData> events;
protected byte[] eventDataBytes;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors
.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
byte[] eventBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
final ArrayList<EventData> eventsList = new ArrayList<>();
for (int number = 0; number < options.getCount(); number++) {
final EventData eventData = EventData.create(eventBytes);
eventData.getProperties().put("index", number);
eventsList.add(eventData);
}
this.events = Collections.unmodifiableList(eventsList);
eventDataBytes = generateString(100).getBytes(StandardCharsets.UTF_8);
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
protected String generateString(int targetLength) {
int leftLimit = 97;
int rightLimit = 122;
Random random = new Random();
String generatedString = random.ints(leftLimit, rightLimit + 1)
.limit(targetLength)
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
return generatedString;
}
/**
* Sends the number of messages to {@code partitionId}.
*
* @param client Client used to send message.
* @param partitionId Destination partition id.
* @param totalMessagesToSend Number of messages to send.
*
* @return A Mono that completes when all messages are sent.
*
* @throws RuntimeException if the partition sender could not be created. Or an exception occurred while sending
* the messages.
*/
Mono<Void> sendMessages(EventHubClient client, String partitionId, int totalMessagesToSend) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
int numberOfMessages = totalMessagesToSend;
while (numberOfMessages > 0) {
currentBatch = sender.createBatch();
addEvents(currentBatch, numberOfMessages);
try {
sender.sendSync(currentBatch);
numberOfMessages = numberOfMessages - currentBatch.getSize();
} catch (EventHubException e) {
System.err.println("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
},
sender -> Mono.fromCompletionStage(sender.close()));
}
Mono<Void> preLoadEvents(EventHubClient client, String partitionId, int totalMessagesToSend) {
final AtomicLong eventsToSend = new AtomicLong(totalMessagesToSend);
final AtomicLong totalEvents = new AtomicLong(0);
Mono<Void> partitionMono;
if (CoreUtils.isNullOrEmpty(partitionId)) {
partitionMono = Mono.fromFuture(client.getRuntimeInformation())
.flatMap(eventHubRuntimeInformation -> {
String[] partitionIds = eventHubRuntimeInformation.getPartitionIds();
return Flux.fromArray(partitionIds)
.map(partId -> Mono.fromFuture(client.getPartitionRuntimeInformation(partId))
.map(partitionRuntimeInformation -> {
totalEvents.addAndGet(partitionRuntimeInformation.getLastEnqueuedSequenceNumber() - partitionRuntimeInformation.getBeginSequenceNumber());
return Mono.empty();
})).then();
}).then();
} else {
partitionMono = Mono.fromFuture(client.getPartitionRuntimeInformation(partitionId))
.map(partitionProperties -> {
totalEvents.addAndGet(partitionProperties.getLastEnqueuedSequenceNumber() - partitionProperties.getBeginSequenceNumber());
return Mono.empty();
}).then();
}
if (!CoreUtils.isNullOrEmpty(partitionId)) {
CompletableFuture<PartitionSender> createSenderFuture;
try {
createSenderFuture = client.createPartitionSender(partitionId);
} catch (EventHubException e) {
createSenderFuture = new CompletableFuture<>();
createSenderFuture.completeExceptionally(
new RuntimeException("Unable to create partition sender: " + partitionId, e));
}
return partitionMono.then(Mono.usingWhen(
Mono.fromCompletionStage(createSenderFuture),
sender -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
currentBatch = sender.createBatch();
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
sender.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
},
sender -> Mono.fromCompletionStage(sender.close())));
} else {
return partitionMono.then(Mono.defer(() -> {
EventDataBatch currentBatch;
if (totalEvents.get() < eventsToSend.get()) {
eventsToSend.set(eventsToSend.get() - totalEvents.get());
while (eventsToSend.get() > 0) {
try {
currentBatch = client.createBatch();
} catch (EventHubException e) {
throw new RuntimeException("Error creating Batch", e);
}
EventData event = createEvent();
try {
while (currentBatch.tryAdd(event)) {
eventsToSend.getAndDecrement();
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
try {
client.sendSync(currentBatch);
} catch (EventHubException e) {
throw new RuntimeException("Could not send batch. Error: " + e);
}
}
System.out.printf("%s: Sent %d messages.%n", partitionId, totalMessagesToSend);
return Mono.empty();
} else {
return Mono.empty();
}
}));
}
}
/**
* Adds the number of messages to the batch. The size of the message is set using {@link
* PerfStressOptions
*
* @param batch The batch to add messages to.
* @param numberOfMessages Number of messages to add.
*/
void addEvents(EventDataBatch batch, int numberOfMessages) {
for (int i = 0; i < numberOfMessages; i++) {
final int index = numberOfMessages % events.size();
final EventData event = events.get(index);
try {
if (!batch.tryAdd(event)) {
System.out.printf("Only added %s of %s events.%n", i, numberOfMessages);
break;
}
} catch (PayloadSizeExceededException e) {
throw new RuntimeException("Event was too large for a single batch.", e);
}
}
}
protected EventData createEvent() {
EventData eventData = EventData.create(eventDataBytes);
return eventData;
}
} | class ServiceBatchTest<TOptions extends PerfStressOptions> extends BatchPerfTest<TOptions> {
private final String eventHubName;
private final String connectionString;
private final String poolSize;
protected EventHubClient eventHubClient;
private ScheduledExecutorService executor;
public ServiceBatchTest(TOptions options) {
super(options);
connectionString = System.getenv("EVENTHUBS_CONNECTION_STRING");
eventHubName = System.getenv("EVENTHUB_NAME");
poolSize = System.getenv("EVENTHUB_POOL_SIZE");
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw new IllegalStateException("Environment variable EVENTHUBS_CONNECTION_STRING must be set");
}
if (CoreUtils.isNullOrEmpty(eventHubName)) {
System.out.println("Environment variable EVENTHUB_NAME must be set");
System.exit(1);
}
executor = Executors.newScheduledThreadPool(poolSize != null ? Integer.valueOf(poolSize) : 4);
try {
eventHubClient = EventHubClient.createFromConnectionStringSync(
new ConnectionStringBuilder(connectionString).setEventHubName(eventHubName).toString(), executor);
} catch (EventHubException | IOException e) {
throw new RuntimeException("Error creating EventHub client.", e);
}
}
@Override
public Mono<Void> setupAsync() {
return Mono.empty();
}
@Override
} |
will this logic be safe to remove when we delete the deprecated API? | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) { | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} |
does this need to be added in the @throw as well? | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | throw new IllegalArgumentException("No partition key value has been specified"); | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} |
Nope, this is just an explanation on the behavior of PartitionKey.None value, not about any particular behavior specific in the builder. The throws also already explains what causes it and the exception informs the user as well. | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | throw new IllegalArgumentException("No partition key value has been specified"); | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} |
it will since none of the other add methods allow the PartitionKeyBuilder to take in a PartitionKey.None | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) { | public PartitionKey build() {
if(this.partitionKeyValues.size() == 0) {
throw new IllegalArgumentException("No partition key value has been specified");
}
if(this.partitionKeyValues.size() == 1 && PartitionKey.NONE.equals(this.partitionKeyValues.get(0))) {
return PartitionKey.NONE;
}
if(this.partitionKeyValues.size() > 1 && this.partitionKeyValues.contains(PartitionKey.NONE)) {
throw new IllegalStateException("PartitionKey.None can't be used with multiple paths");
}
PartitionKeyInternal partitionKeyInternal;
Object[] valueArray = new Object[this.partitionKeyValues.size()];
for(int i = 0; i < this.partitionKeyValues.size(); i++) {
Object val = this.partitionKeyValues.get(i);
if(PartitionKey.NONE.equals(val)) {
valueArray[i] = Undefined.value();
}
else {
valueArray[i] = val;
}
}
partitionKeyInternal = PartitionKeyInternal.fromObjectArray(valueArray, true);
StringBuilder backendValues = new StringBuilder();
if (valueArray.length == 1) {
backendValues.append((String) valueArray[0]);
} else {
for (int i = 0; i < valueArray.length; i++) {
backendValues.append((String) valueArray[i]);
if (i < valueArray.length-1) {
backendValues.append("=");
}
}
}
return new PartitionKey(backendValues.toString(), partitionKeyInternal);
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} | class PartitionKeyBuilder {
private final List<Object> partitionKeyValues;
/**
* Constructor. CREATE a new instance of the PartitionKeyBuilder object.
*/
public PartitionKeyBuilder() {
this.partitionKeyValues = new ArrayList<Object>();
}
/**
* Adds partition value of type string
* @param value The value of type string to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(String value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type double
* @param value The value of type double to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(double value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds partition value of type boolean
* @param value The value of type boolean to be used as partition key
* @return The current PartitionKeyBuilder object
*/
public PartitionKeyBuilder add(boolean value) {
this.partitionKeyValues.add(value);
return this;
}
/**
* Adds a null partition key value
* @return The current PartitionKeyBuilder object
* @deprecated Null value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNullValue() {
this.partitionKeyValues.add(null);
return this;
}
/**
* Adds a None Partition Key value to the path. An error will be raised if used with other paths.
* @return The current PartitionKeyBuilder object
* @deprecated PartitionKey.None value should only be used with PartitionKey constructor.
*/
@Deprecated
public PartitionKeyBuilder addNoneValue() {
this.partitionKeyValues.add(PartitionKey.NONE);
return this;
}
/**
* Builds a new instance of the type PartitionKey with the specified Partition Key values.
* @return PartitionKey object
* @throws IllegalStateException when using PartitionKey.None with other values
*/
} |
I'm not sure why this test (which I know you copied) only validates the second, but it should probably validate the whole timestamp or something? If I'm right about the timezone thing, it would have caught it here, so we should enhance this. | public void testUseAzureDeveloperCliCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
when(identityClient.authenticateWithAzureCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithSharedTokenCache(request, null)).thenReturn(Mono.empty());
when(identityClient.authenticateWithIntelliJ(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithVsCodeCredential(any(), any())).thenReturn(Mono.empty());
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
} | StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete(); | public void testUseAzureDeveloperCliCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
when(identityClient.authenticateWithAzureCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithSharedTokenCache(request, null)).thenReturn(Mono.empty());
when(identityClient.authenticateWithIntelliJ(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithVsCodeCredential(any(), any())).thenReturn(Mono.empty());
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
} | class DefaultAzureCredentialTest {
private static final String TENANT_ID = "contoso.com";
private static final String CLIENT_ID = UUID.randomUUID().toString();
@Test
public void testUseEnvironmentCredential() throws Exception {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
String secret = "secret";
String token1 = "token1";
TokenRequestContext request1 = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
configuration.put("AZURE_CLIENT_ID", CLIENT_ID);
configuration.put("AZURE_CLIENT_SECRET", secret);
configuration.put("AZURE_TENANT_ID", TENANT_ID);
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request1)).thenReturn(Mono.empty());
when(identityClient.authenticateWithConfidentialClientCache(any())).thenReturn(Mono.empty());
when(identityClient.authenticateWithConfidentialClient(request1)).thenReturn(TestUtils.getMockAccessToken(token1, expiresOn));
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request1)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request1)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresOn.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
public void testUseManagedIdentityCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
public void testUseAzureCliCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithAzureCli(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithSharedTokenCache(request, null)).thenReturn(Mono.empty());
when(identityClient.authenticateWithIntelliJ(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithVsCodeCredential(any(), any())).thenReturn(Mono.empty());
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
@Test
public void testNoCredentialWorks() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> identityClientMock = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from managed identity")));
}); MockedConstruction<SharedTokenCacheCredential> sharedTokenCacheCredentialMock = mockConstruction(SharedTokenCacheCredential.class, (sharedTokenCacheCredential, context) -> {
when(sharedTokenCacheCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from shared token cache")));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (AzureDeveloperCliCredential, context) -> {
when(AzureDeveloperCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure Developer CLI credential")));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure CLI credential")));
}); MockedConstruction<AzurePowerShellCredential> azurePowerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (azurePowerShellCredential, context) -> {
when(azurePowerShellCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure PowerShell credential")));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from IntelliJ Credential")));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectErrorMatches(t -> t instanceof CredentialUnavailableException && t.getMessage().startsWith("EnvironmentCredential authentication unavailable. ")).verify();
Assert.assertNotNull(identityClientMock);
Assert.assertNotNull(sharedTokenCacheCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
Assert.assertNotNull(azurePowerShellCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
}
}
@Test
public void testCredentialUnavailable() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<ManagedIdentityCredential> managedIdentityCredentialMock = mockConstruction(ManagedIdentityCredential.class, (managedIdentityCredential, context) -> {
when(managedIdentityCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Managed Identity credential")));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from IntelliJ Credential")));
}); MockedConstruction<AzurePowerShellCredential> powerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (powerShellCredential, context) -> {
when(powerShellCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Powershell credential")));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Cli credential")));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (AzureDeveloperCliCredential, context) -> {
when(AzureDeveloperCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure Developer CLI credential")));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectErrorMatches(t -> t instanceof CredentialUnavailableException && t.getMessage().startsWith("EnvironmentCredential authentication unavailable. ")).verify();
Assert.assertNotNull(managedIdentityCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
Assert.assertNotNull(powerShellCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
}
}
@Test
public void testCredentialUnavailableSync() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<ManagedIdentityCredential> managedIdentityCredentialMock = mockConstruction(ManagedIdentityCredential.class, (managedIdentityCredential, context) -> {
when(managedIdentityCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Managed Identity credential"));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from IntelliJ Credential"));
}); MockedConstruction<AzurePowerShellCredential> powerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (powerShellCredential, context) -> {
when(powerShellCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Powershell credential"));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Cli credential"));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (azureDeveloperCliCredential, context) -> {
when(azureDeveloperCliCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Azure Developer Cli credential"));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
try {
credential.getTokenSync(request);
} catch (Exception e) {
Assert.assertTrue(e instanceof CredentialUnavailableException && e.getMessage().startsWith("EnvironmentCredential authentication unavailable. "));
}
Assert.assertNotNull(managedIdentityCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
Assert.assertNotNull(powerShellCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
}
}
@Test(expected = IllegalStateException.class)
public void testInvalidIdCombination() {
String resourceId = "/subscriptions/" + UUID.randomUUID() + "/resourcegroups/aresourcegroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ident";
new DefaultAzureCredentialBuilder().managedIdentityClientId(CLIENT_ID).managedIdentityResourceId(resourceId).build();
}
@Test
public void testInvalidAdditionalTenant() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().additionallyAllowedTenants("RANDOM").build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e instanceof ClientAuthenticationException && (e.getMessage().contains("The current credential is not configured to")))
.verify();
}
@Test
public void testInvalidMultiTenantAuth() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e instanceof ClientAuthenticationException && (e.getMessage().contains("The current credential is not configured to")))
.verify();
}
@Test
public void testValidMultiTenantAuth() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().additionallyAllowedTenants("*").build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e.getCause() instanceof MsalServiceException)
.verify();
}
} | class DefaultAzureCredentialTest {
private static final String TENANT_ID = "contoso.com";
private static final String CLIENT_ID = UUID.randomUUID().toString();
@Test
public void testUseEnvironmentCredential() throws Exception {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
String secret = "secret";
String token1 = "token1";
TokenRequestContext request1 = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresOn = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
configuration.put("AZURE_CLIENT_ID", CLIENT_ID);
configuration.put("AZURE_CLIENT_SECRET", secret);
configuration.put("AZURE_TENANT_ID", TENANT_ID);
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request1)).thenReturn(Mono.empty());
when(identityClient.authenticateWithConfidentialClientCache(any())).thenReturn(Mono.empty());
when(identityClient.authenticateWithConfidentialClient(request1)).thenReturn(TestUtils.getMockAccessToken(token1, expiresOn));
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request1)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request1)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresOn.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
public void testUseManagedIdentityCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
public void testUseAzureCliCredential() throws Exception {
String token1 = "token1";
TokenRequestContext request = new TokenRequestContext().addScopes("https:
OffsetDateTime expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusHours(1);
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> mocked = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithAzureDeveloperCli(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithAzureCli(request)).thenReturn(TestUtils.getMockAccessToken(token1, expiresAt));
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithSharedTokenCache(request, null)).thenReturn(Mono.empty());
when(identityClient.authenticateWithIntelliJ(request)).thenReturn(Mono.empty());
when(identityClient.authenticateWithVsCodeCredential(any(), any())).thenReturn(Mono.empty());
}); MockedConstruction<IntelliJCredential> ijcredential = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.empty());
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectNextMatches(accessToken -> token1.equals(accessToken.getToken()) && expiresAt.getSecond() == accessToken.getExpiresAt().getSecond()).verifyComplete();
Assert.assertNotNull(mocked);
Assert.assertNotNull(ijcredential);
}
}
@Test
@Test
public void testNoCredentialWorks() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<IdentityClient> identityClientMock = mockConstruction(IdentityClient.class, (identityClient, context) -> {
when(identityClient.authenticateWithManagedIdentityConfidentialClient(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from managed identity")));
}); MockedConstruction<SharedTokenCacheCredential> sharedTokenCacheCredentialMock = mockConstruction(SharedTokenCacheCredential.class, (sharedTokenCacheCredential, context) -> {
when(sharedTokenCacheCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from shared token cache")));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (AzureDeveloperCliCredential, context) -> {
when(AzureDeveloperCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure Developer CLI credential")));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure CLI credential")));
}); MockedConstruction<AzurePowerShellCredential> azurePowerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (azurePowerShellCredential, context) -> {
when(azurePowerShellCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure PowerShell credential")));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from IntelliJ Credential")));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectErrorMatches(t -> t instanceof CredentialUnavailableException && t.getMessage().startsWith("EnvironmentCredential authentication unavailable. ")).verify();
Assert.assertNotNull(identityClientMock);
Assert.assertNotNull(sharedTokenCacheCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
Assert.assertNotNull(azurePowerShellCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
}
}
@Test
public void testCredentialUnavailable() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<ManagedIdentityCredential> managedIdentityCredentialMock = mockConstruction(ManagedIdentityCredential.class, (managedIdentityCredential, context) -> {
when(managedIdentityCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Managed Identity credential")));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from IntelliJ Credential")));
}); MockedConstruction<AzurePowerShellCredential> powerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (powerShellCredential, context) -> {
when(powerShellCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Powershell credential")));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Cli credential")));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (AzureDeveloperCliCredential, context) -> {
when(AzureDeveloperCliCredential.getToken(request)).thenReturn(Mono.error(new CredentialUnavailableException("Cannot get token from Azure Developer CLI credential")));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
StepVerifier.create(credential.getToken(request)).expectErrorMatches(t -> t instanceof CredentialUnavailableException && t.getMessage().startsWith("EnvironmentCredential authentication unavailable. ")).verify();
Assert.assertNotNull(managedIdentityCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
Assert.assertNotNull(powerShellCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
}
}
@Test
public void testCredentialUnavailableSync() throws Exception {
TokenRequestContext request = new TokenRequestContext().addScopes("https:
EmptyEnvironmentConfigurationSource source = new EmptyEnvironmentConfigurationSource();
Configuration configuration = new ConfigurationBuilder(source, source, source).build();
try (MockedConstruction<ManagedIdentityCredential> managedIdentityCredentialMock = mockConstruction(ManagedIdentityCredential.class, (managedIdentityCredential, context) -> {
when(managedIdentityCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Managed Identity credential"));
}); MockedConstruction<IntelliJCredential> intelliJCredentialMock = mockConstruction(IntelliJCredential.class, (intelliJCredential, context) -> {
when(intelliJCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from IntelliJ Credential"));
}); MockedConstruction<AzurePowerShellCredential> powerShellCredentialMock = mockConstruction(AzurePowerShellCredential.class, (powerShellCredential, context) -> {
when(powerShellCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Powershell credential"));
}); MockedConstruction<AzureCliCredential> azureCliCredentialMock = mockConstruction(AzureCliCredential.class, (azureCliCredential, context) -> {
when(azureCliCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Cli credential"));
}); MockedConstruction<AzureDeveloperCliCredential> azureDeveloperCliCredentialMock = mockConstruction(AzureDeveloperCliCredential.class, (azureDeveloperCliCredential, context) -> {
when(azureDeveloperCliCredential.getTokenSync(request)).thenThrow(new CredentialUnavailableException("Cannot get token from Azure Developer Cli credential"));
})) {
DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().configuration(configuration).build();
try {
credential.getTokenSync(request);
} catch (Exception e) {
Assert.assertTrue(e instanceof CredentialUnavailableException && e.getMessage().startsWith("EnvironmentCredential authentication unavailable. "));
}
Assert.assertNotNull(managedIdentityCredentialMock);
Assert.assertNotNull(intelliJCredentialMock);
Assert.assertNotNull(powerShellCredentialMock);
Assert.assertNotNull(azureCliCredentialMock);
Assert.assertNotNull(azureDeveloperCliCredentialMock);
}
}
@Test(expected = IllegalStateException.class)
public void testInvalidIdCombination() {
String resourceId = "/subscriptions/" + UUID.randomUUID() + "/resourcegroups/aresourcegroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ident";
new DefaultAzureCredentialBuilder().managedIdentityClientId(CLIENT_ID).managedIdentityResourceId(resourceId).build();
}
@Test
public void testInvalidAdditionalTenant() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().additionallyAllowedTenants("RANDOM").build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e instanceof ClientAuthenticationException && (e.getMessage().contains("The current credential is not configured to")))
.verify();
}
@Test
public void testInvalidMultiTenantAuth() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e instanceof ClientAuthenticationException && (e.getMessage().contains("The current credential is not configured to")))
.verify();
}
@Test
public void testValidMultiTenantAuth() throws Exception {
Configuration.getGlobalConfiguration()
.put(Configuration.PROPERTY_AZURE_CLIENT_ID, "foo")
.put(Configuration.PROPERTY_AZURE_CLIENT_SECRET, "bar")
.put(Configuration.PROPERTY_AZURE_TENANT_ID, "baz");
TokenRequestContext request = new TokenRequestContext().addScopes("https:
.setTenantId("newTenant");
DefaultAzureCredential credential =
new DefaultAzureCredentialBuilder().additionallyAllowedTenants("*").build();
StepVerifier.create(credential.getToken(request))
.expectErrorMatches(e -> e.getCause() instanceof MsalServiceException)
.verify();
}
} |
For now we are putting this credential right after `ManagedIdentityCredential`. We may change this after archboard review. In the meantime, we should be consistent with other languages. | private ArrayList<TokenCredential> getCredentialsChain() {
ArrayList<TokenCredential> output = new ArrayList<TokenCredential>(7);
output.add(new EnvironmentCredential(identityClientOptions.clone()));
output.add(new ManagedIdentityCredential(managedIdentityClientId, managedIdentityResourceId, identityClientOptions.clone()));
output.add(new SharedTokenCacheCredential(null, IdentityConstants.DEVELOPER_SINGLE_SIGN_ON_ID,
tenantId, identityClientOptions.clone()));
output.add(new IntelliJCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzurePowerShellCredential(tenantId, identityClientOptions.clone()));
return output;
} | output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone())); | private ArrayList<TokenCredential> getCredentialsChain() {
ArrayList<TokenCredential> output = new ArrayList<TokenCredential>(7);
output.add(new EnvironmentCredential(identityClientOptions.clone()));
output.add(new ManagedIdentityCredential(managedIdentityClientId, managedIdentityResourceId, identityClientOptions.clone()));
output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone()));
output.add(new SharedTokenCacheCredential(null, IdentityConstants.DEVELOPER_SINGLE_SIGN_ON_ID,
tenantId, identityClientOptions.clone()));
output.add(new IntelliJCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzurePowerShellCredential(tenantId, identityClientOptions.clone()));
return output;
} | class DefaultAzureCredentialBuilder extends CredentialBuilderBase<DefaultAzureCredentialBuilder> {
private static final ClientLogger LOGGER = new ClientLogger(DefaultAzureCredentialBuilder.class);
private String tenantId;
private String managedIdentityClientId;
private String managedIdentityResourceId;
private List<String> additionallyAllowedTenants = IdentityUtil
.getAdditionalTenantsFromEnvironment(Configuration.getGlobalConfiguration().clone());
/**
* Creates an instance of a DefaultAzureCredentialBuilder.
*/
public DefaultAzureCredentialBuilder() {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
managedIdentityClientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
this.identityClientOptions.setIdentityLogOptionsImpl(new IdentityLogOptionsImpl(true));
}
/**
* Sets the tenant id of the user to authenticate through the {@link DefaultAzureCredential}. If unset, the value
* in the AZURE_TENANT_ID environment variable will be used. If neither is set, the default is null
* and will authenticate users to their default tenant.
*
* @param tenantId the tenant ID to set.
* @return An updated instance of this builder with the tenant id set as specified.
*/
public DefaultAzureCredentialBuilder tenantId(String tenantId) {
this.tenantId = tenantId;
return this;
}
/**
* Specifies the Azure Active Directory endpoint to acquire tokens.
* @param authorityHost the Azure Active Directory endpoint
* @return An updated instance of this builder with the authority host set as specified.
*/
public DefaultAzureCredentialBuilder authorityHost(String authorityHost) {
this.identityClientOptions.setAuthorityHost(authorityHost);
return this;
}
/**
* Specifies the KeePass database path to read the cached credentials of Azure toolkit for IntelliJ plugin.
* The {@code databasePath} is required on Windows platform. For macOS and Linux platform native key chain /
* key ring will be accessed respectively to retrieve the cached credentials.
*
* <p>This path can be located in the IntelliJ IDE.
* Windows: File -> Settings -> Appearance & Behavior -> System Settings -> Passwords. </p>
*
* @param databasePath the path to the KeePass database.
* @throws IllegalArgumentException if {@code databasePath} is either not specified or is empty.
* @return An updated instance of this builder with the KeePass database path set as specified.
*/
public DefaultAzureCredentialBuilder intelliJKeePassDatabasePath(String databasePath) {
if (CoreUtils.isNullOrEmpty(databasePath)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("The KeePass database path is either empty or not configured."
+ " Please configure it on the builder."));
}
this.identityClientOptions.setIntelliJKeePassDatabasePath(databasePath);
return this;
}
/**
* Specifies the client ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityClientId and managedIdentityResourceId can be specified.
*
* @param clientId the client ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityClientId(String clientId) {
this.managedIdentityClientId = clientId;
return this;
}
/**
* Specifies the resource ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityResourceId and managedIdentityClientId can be specified.
*
* @param resourceId the resource ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityResourceId(String resourceId) {
this.managedIdentityResourceId = resourceId;
return this;
}
/**
* Specifies the ExecutorService to be used to execute the authentication requests.
* Developer is responsible for maintaining the lifecycle of the ExecutorService.
*
* <p>
* If this is not configured, the {@link ForkJoinPool
* also shared with other application tasks. If the common pool is heavily used for other tasks, authentication
* requests might starve and setting up this executor service should be considered.
* </p>
*
* <p> The executor service and can be safely shutdown if the TokenCredential is no longer being used by the
* Azure SDK clients and should be shutdown before the application exits. </p>
*
* @param executorService the executor service to use for executing authentication requests.
* @return An updated instance of this builder with the executor service set as specified.
*/
public DefaultAzureCredentialBuilder executorService(ExecutorService executorService) {
this.identityClientOptions.setExecutorService(executorService);
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(String... additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(Arrays.asList(additionallyAllowedTenants));
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(List<String> additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(additionallyAllowedTenants);
return this;
}
/**
* Creates new {@link DefaultAzureCredential} with the configured options set.
*
* @return a {@link DefaultAzureCredential} with the current configurations.
* @throws IllegalStateException if clientId and resourceId are both set.
*/
public DefaultAzureCredential build() {
if (managedIdentityClientId != null && managedIdentityResourceId != null) {
throw LOGGER.logExceptionAsError(
new IllegalStateException("Only one of managedIdentityResourceId and managedIdentityClientId can be specified."));
}
if (!CoreUtils.isNullOrEmpty(additionallyAllowedTenants)) {
identityClientOptions.setAdditionallyAllowedTenants(additionallyAllowedTenants);
}
return new DefaultAzureCredential(getCredentialsChain());
}
} | class DefaultAzureCredentialBuilder extends CredentialBuilderBase<DefaultAzureCredentialBuilder> {
private static final ClientLogger LOGGER = new ClientLogger(DefaultAzureCredentialBuilder.class);
private String tenantId;
private String managedIdentityClientId;
private String managedIdentityResourceId;
private List<String> additionallyAllowedTenants = IdentityUtil
.getAdditionalTenantsFromEnvironment(Configuration.getGlobalConfiguration().clone());
/**
* Creates an instance of a DefaultAzureCredentialBuilder.
*/
public DefaultAzureCredentialBuilder() {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
managedIdentityClientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
this.identityClientOptions.setIdentityLogOptionsImpl(new IdentityLogOptionsImpl(true));
}
/**
* Sets the tenant id of the user to authenticate through the {@link DefaultAzureCredential}. If unset, the value
* in the AZURE_TENANT_ID environment variable will be used. If neither is set, the default is null
* and will authenticate users to their default tenant.
*
* @param tenantId the tenant ID to set.
* @return An updated instance of this builder with the tenant id set as specified.
*/
public DefaultAzureCredentialBuilder tenantId(String tenantId) {
this.tenantId = tenantId;
return this;
}
/**
* Specifies the Azure Active Directory endpoint to acquire tokens.
* @param authorityHost the Azure Active Directory endpoint
* @return An updated instance of this builder with the authority host set as specified.
*/
public DefaultAzureCredentialBuilder authorityHost(String authorityHost) {
this.identityClientOptions.setAuthorityHost(authorityHost);
return this;
}
/**
* Specifies the KeePass database path to read the cached credentials of Azure toolkit for IntelliJ plugin.
* The {@code databasePath} is required on Windows platform. For macOS and Linux platform native key chain /
* key ring will be accessed respectively to retrieve the cached credentials.
*
* <p>This path can be located in the IntelliJ IDE.
* Windows: File -> Settings -> Appearance & Behavior -> System Settings -> Passwords. </p>
*
* @param databasePath the path to the KeePass database.
* @throws IllegalArgumentException if {@code databasePath} is either not specified or is empty.
* @return An updated instance of this builder with the KeePass database path set as specified.
*/
public DefaultAzureCredentialBuilder intelliJKeePassDatabasePath(String databasePath) {
if (CoreUtils.isNullOrEmpty(databasePath)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("The KeePass database path is either empty or not configured."
+ " Please configure it on the builder."));
}
this.identityClientOptions.setIntelliJKeePassDatabasePath(databasePath);
return this;
}
/**
* Specifies the client ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityClientId and managedIdentityResourceId can be specified.
*
* @param clientId the client ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityClientId(String clientId) {
this.managedIdentityClientId = clientId;
return this;
}
/**
* Specifies the resource ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityResourceId and managedIdentityClientId can be specified.
*
* @param resourceId the resource ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityResourceId(String resourceId) {
this.managedIdentityResourceId = resourceId;
return this;
}
/**
* Specifies the ExecutorService to be used to execute the authentication requests.
* Developer is responsible for maintaining the lifecycle of the ExecutorService.
*
* <p>
* If this is not configured, the {@link ForkJoinPool
* also shared with other application tasks. If the common pool is heavily used for other tasks, authentication
* requests might starve and setting up this executor service should be considered.
* </p>
*
* <p> The executor service and can be safely shutdown if the TokenCredential is no longer being used by the
* Azure SDK clients and should be shutdown before the application exits. </p>
*
* @param executorService the executor service to use for executing authentication requests.
* @return An updated instance of this builder with the executor service set as specified.
*/
public DefaultAzureCredentialBuilder executorService(ExecutorService executorService) {
this.identityClientOptions.setExecutorService(executorService);
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(String... additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(Arrays.asList(additionallyAllowedTenants));
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(List<String> additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(additionallyAllowedTenants);
return this;
}
/**
* Creates new {@link DefaultAzureCredential} with the configured options set.
*
* @return a {@link DefaultAzureCredential} with the current configurations.
* @throws IllegalStateException if clientId and resourceId are both set.
*/
public DefaultAzureCredential build() {
if (managedIdentityClientId != null && managedIdentityResourceId != null) {
throw LOGGER.logExceptionAsError(
new IllegalStateException("Only one of managedIdentityResourceId and managedIdentityClientId can be specified."));
}
if (!CoreUtils.isNullOrEmpty(additionallyAllowedTenants)) {
identityClientOptions.setAdditionallyAllowedTenants(additionallyAllowedTenants);
}
return new DefaultAzureCredential(getCredentialsChain());
}
} |
updated | private ArrayList<TokenCredential> getCredentialsChain() {
ArrayList<TokenCredential> output = new ArrayList<TokenCredential>(7);
output.add(new EnvironmentCredential(identityClientOptions.clone()));
output.add(new ManagedIdentityCredential(managedIdentityClientId, managedIdentityResourceId, identityClientOptions.clone()));
output.add(new SharedTokenCacheCredential(null, IdentityConstants.DEVELOPER_SINGLE_SIGN_ON_ID,
tenantId, identityClientOptions.clone()));
output.add(new IntelliJCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzurePowerShellCredential(tenantId, identityClientOptions.clone()));
return output;
} | output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone())); | private ArrayList<TokenCredential> getCredentialsChain() {
ArrayList<TokenCredential> output = new ArrayList<TokenCredential>(7);
output.add(new EnvironmentCredential(identityClientOptions.clone()));
output.add(new ManagedIdentityCredential(managedIdentityClientId, managedIdentityResourceId, identityClientOptions.clone()));
output.add(new AzureDeveloperCliCredential(tenantId, identityClientOptions.clone()));
output.add(new SharedTokenCacheCredential(null, IdentityConstants.DEVELOPER_SINGLE_SIGN_ON_ID,
tenantId, identityClientOptions.clone()));
output.add(new IntelliJCredential(tenantId, identityClientOptions.clone()));
output.add(new AzureCliCredential(tenantId, identityClientOptions.clone()));
output.add(new AzurePowerShellCredential(tenantId, identityClientOptions.clone()));
return output;
} | class DefaultAzureCredentialBuilder extends CredentialBuilderBase<DefaultAzureCredentialBuilder> {
private static final ClientLogger LOGGER = new ClientLogger(DefaultAzureCredentialBuilder.class);
private String tenantId;
private String managedIdentityClientId;
private String managedIdentityResourceId;
private List<String> additionallyAllowedTenants = IdentityUtil
.getAdditionalTenantsFromEnvironment(Configuration.getGlobalConfiguration().clone());
/**
* Creates an instance of a DefaultAzureCredentialBuilder.
*/
public DefaultAzureCredentialBuilder() {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
managedIdentityClientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
this.identityClientOptions.setIdentityLogOptionsImpl(new IdentityLogOptionsImpl(true));
}
/**
* Sets the tenant id of the user to authenticate through the {@link DefaultAzureCredential}. If unset, the value
* in the AZURE_TENANT_ID environment variable will be used. If neither is set, the default is null
* and will authenticate users to their default tenant.
*
* @param tenantId the tenant ID to set.
* @return An updated instance of this builder with the tenant id set as specified.
*/
public DefaultAzureCredentialBuilder tenantId(String tenantId) {
this.tenantId = tenantId;
return this;
}
/**
* Specifies the Azure Active Directory endpoint to acquire tokens.
* @param authorityHost the Azure Active Directory endpoint
* @return An updated instance of this builder with the authority host set as specified.
*/
public DefaultAzureCredentialBuilder authorityHost(String authorityHost) {
this.identityClientOptions.setAuthorityHost(authorityHost);
return this;
}
/**
* Specifies the KeePass database path to read the cached credentials of Azure toolkit for IntelliJ plugin.
* The {@code databasePath} is required on Windows platform. For macOS and Linux platform native key chain /
* key ring will be accessed respectively to retrieve the cached credentials.
*
* <p>This path can be located in the IntelliJ IDE.
* Windows: File -> Settings -> Appearance & Behavior -> System Settings -> Passwords. </p>
*
* @param databasePath the path to the KeePass database.
* @throws IllegalArgumentException if {@code databasePath} is either not specified or is empty.
* @return An updated instance of this builder with the KeePass database path set as specified.
*/
public DefaultAzureCredentialBuilder intelliJKeePassDatabasePath(String databasePath) {
if (CoreUtils.isNullOrEmpty(databasePath)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("The KeePass database path is either empty or not configured."
+ " Please configure it on the builder."));
}
this.identityClientOptions.setIntelliJKeePassDatabasePath(databasePath);
return this;
}
/**
* Specifies the client ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityClientId and managedIdentityResourceId can be specified.
*
* @param clientId the client ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityClientId(String clientId) {
this.managedIdentityClientId = clientId;
return this;
}
/**
* Specifies the resource ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityResourceId and managedIdentityClientId can be specified.
*
* @param resourceId the resource ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityResourceId(String resourceId) {
this.managedIdentityResourceId = resourceId;
return this;
}
/**
* Specifies the ExecutorService to be used to execute the authentication requests.
* Developer is responsible for maintaining the lifecycle of the ExecutorService.
*
* <p>
* If this is not configured, the {@link ForkJoinPool
* also shared with other application tasks. If the common pool is heavily used for other tasks, authentication
* requests might starve and setting up this executor service should be considered.
* </p>
*
* <p> The executor service and can be safely shutdown if the TokenCredential is no longer being used by the
* Azure SDK clients and should be shutdown before the application exits. </p>
*
* @param executorService the executor service to use for executing authentication requests.
* @return An updated instance of this builder with the executor service set as specified.
*/
public DefaultAzureCredentialBuilder executorService(ExecutorService executorService) {
this.identityClientOptions.setExecutorService(executorService);
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(String... additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(Arrays.asList(additionallyAllowedTenants));
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(List<String> additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(additionallyAllowedTenants);
return this;
}
/**
* Creates new {@link DefaultAzureCredential} with the configured options set.
*
* @return a {@link DefaultAzureCredential} with the current configurations.
* @throws IllegalStateException if clientId and resourceId are both set.
*/
public DefaultAzureCredential build() {
if (managedIdentityClientId != null && managedIdentityResourceId != null) {
throw LOGGER.logExceptionAsError(
new IllegalStateException("Only one of managedIdentityResourceId and managedIdentityClientId can be specified."));
}
if (!CoreUtils.isNullOrEmpty(additionallyAllowedTenants)) {
identityClientOptions.setAdditionallyAllowedTenants(additionallyAllowedTenants);
}
return new DefaultAzureCredential(getCredentialsChain());
}
} | class DefaultAzureCredentialBuilder extends CredentialBuilderBase<DefaultAzureCredentialBuilder> {
private static final ClientLogger LOGGER = new ClientLogger(DefaultAzureCredentialBuilder.class);
private String tenantId;
private String managedIdentityClientId;
private String managedIdentityResourceId;
private List<String> additionallyAllowedTenants = IdentityUtil
.getAdditionalTenantsFromEnvironment(Configuration.getGlobalConfiguration().clone());
/**
* Creates an instance of a DefaultAzureCredentialBuilder.
*/
public DefaultAzureCredentialBuilder() {
Configuration configuration = Configuration.getGlobalConfiguration().clone();
tenantId = configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID);
managedIdentityClientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID);
this.identityClientOptions.setIdentityLogOptionsImpl(new IdentityLogOptionsImpl(true));
}
/**
* Sets the tenant id of the user to authenticate through the {@link DefaultAzureCredential}. If unset, the value
* in the AZURE_TENANT_ID environment variable will be used. If neither is set, the default is null
* and will authenticate users to their default tenant.
*
* @param tenantId the tenant ID to set.
* @return An updated instance of this builder with the tenant id set as specified.
*/
public DefaultAzureCredentialBuilder tenantId(String tenantId) {
this.tenantId = tenantId;
return this;
}
/**
* Specifies the Azure Active Directory endpoint to acquire tokens.
* @param authorityHost the Azure Active Directory endpoint
* @return An updated instance of this builder with the authority host set as specified.
*/
public DefaultAzureCredentialBuilder authorityHost(String authorityHost) {
this.identityClientOptions.setAuthorityHost(authorityHost);
return this;
}
/**
* Specifies the KeePass database path to read the cached credentials of Azure toolkit for IntelliJ plugin.
* The {@code databasePath} is required on Windows platform. For macOS and Linux platform native key chain /
* key ring will be accessed respectively to retrieve the cached credentials.
*
* <p>This path can be located in the IntelliJ IDE.
* Windows: File -> Settings -> Appearance & Behavior -> System Settings -> Passwords. </p>
*
* @param databasePath the path to the KeePass database.
* @throws IllegalArgumentException if {@code databasePath} is either not specified or is empty.
* @return An updated instance of this builder with the KeePass database path set as specified.
*/
public DefaultAzureCredentialBuilder intelliJKeePassDatabasePath(String databasePath) {
if (CoreUtils.isNullOrEmpty(databasePath)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("The KeePass database path is either empty or not configured."
+ " Please configure it on the builder."));
}
this.identityClientOptions.setIntelliJKeePassDatabasePath(databasePath);
return this;
}
/**
* Specifies the client ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityClientId and managedIdentityResourceId can be specified.
*
* @param clientId the client ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityClientId(String clientId) {
this.managedIdentityClientId = clientId;
return this;
}
/**
* Specifies the resource ID of user assigned or system assigned identity, when this credential is running
* in an environment with managed identities. If unset, the value in the AZURE_CLIENT_ID environment variable
* will be used. If neither is set, the default value is null and will only work with system assigned
* managed identities and not user assigned managed identities.
*
* Only one of managedIdentityResourceId and managedIdentityClientId can be specified.
*
* @param resourceId the resource ID
* @return the DefaultAzureCredentialBuilder itself
*/
public DefaultAzureCredentialBuilder managedIdentityResourceId(String resourceId) {
this.managedIdentityResourceId = resourceId;
return this;
}
/**
* Specifies the ExecutorService to be used to execute the authentication requests.
* Developer is responsible for maintaining the lifecycle of the ExecutorService.
*
* <p>
* If this is not configured, the {@link ForkJoinPool
* also shared with other application tasks. If the common pool is heavily used for other tasks, authentication
* requests might starve and setting up this executor service should be considered.
* </p>
*
* <p> The executor service and can be safely shutdown if the TokenCredential is no longer being used by the
* Azure SDK clients and should be shutdown before the application exits. </p>
*
* @param executorService the executor service to use for executing authentication requests.
* @return An updated instance of this builder with the executor service set as specified.
*/
public DefaultAzureCredentialBuilder executorService(ExecutorService executorService) {
this.identityClientOptions.setExecutorService(executorService);
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(String... additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(Arrays.asList(additionallyAllowedTenants));
return this;
}
/**
* For multi-tenant applications, specifies additional tenants for which the credential may acquire tokens.
* Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application is installed.
*
* @param additionallyAllowedTenants the additionally allowed tenants.
* @return An updated instance of this builder with the tenant id set as specified.
*/
@SuppressWarnings("unchecked")
public DefaultAzureCredentialBuilder additionallyAllowedTenants(List<String> additionallyAllowedTenants) {
this.additionallyAllowedTenants = IdentityUtil.resolveAdditionalTenants(additionallyAllowedTenants);
return this;
}
/**
* Creates new {@link DefaultAzureCredential} with the configured options set.
*
* @return a {@link DefaultAzureCredential} with the current configurations.
* @throws IllegalStateException if clientId and resourceId are both set.
*/
public DefaultAzureCredential build() {
if (managedIdentityClientId != null && managedIdentityResourceId != null) {
throw LOGGER.logExceptionAsError(
new IllegalStateException("Only one of managedIdentityResourceId and managedIdentityClientId can be specified."));
}
if (!CoreUtils.isNullOrEmpty(additionallyAllowedTenants)) {
identityClientOptions.setAdditionallyAllowedTenants(additionallyAllowedTenants);
}
return new DefaultAzureCredential(getCredentialsChain());
}
} |
Previously, we didn't check for null and if the schema version was null, we'd throw an exception. This will change the behavior. | public SchemasGetByIdHeaders(HttpHeaders rawHeaders) {
String schemaVersion = rawHeaders.getValue(SCHEMA_VERSION);
if (schemaVersion != null) {
this.schemaVersion = Integer.parseInt(schemaVersion);
}
this.schemaId = rawHeaders.getValue(SCHEMA_ID);
this.schemaGroupName = rawHeaders.getValue(SCHEMA_GROUP_NAME);
this.schemaName = rawHeaders.getValue(SCHEMA_NAME);
this.schemaIdLocation = rawHeaders.getValue(SCHEMA_ID_LOCATION);
this.location = rawHeaders.getValue(HttpHeaderName.LOCATION);
this.contentType = rawHeaders.getValue(HttpHeaderName.CONTENT_TYPE);
} | } | public SchemasGetByIdHeaders(HttpHeaders rawHeaders) {
String schemaVersion = rawHeaders.getValue(SCHEMA_VERSION);
if (schemaVersion != null) {
this.schemaVersion = Integer.parseInt(schemaVersion);
}
this.schemaId = rawHeaders.getValue(SCHEMA_ID);
this.schemaGroupName = rawHeaders.getValue(SCHEMA_GROUP_NAME);
this.schemaName = rawHeaders.getValue(SCHEMA_NAME);
this.schemaIdLocation = rawHeaders.getValue(SCHEMA_ID_LOCATION);
this.location = rawHeaders.getValue(HttpHeaderName.LOCATION);
String contentType = rawHeaders.getValue(HttpHeaderName.CONTENT_TYPE);
if (contentType != null) {
this.contentType = SchemaFormat.fromString(contentType);
}
} | class SchemasGetByIdHeaders {
/*
* The Schema-Version property.
*/
@JsonProperty(value = "Schema-Version")
private Integer schemaVersion;
/*
* The Schema-Id property.
*/
@JsonProperty(value = "Schema-Id")
private String schemaId;
/*
* The Schema-Group-Name property.
*/
@JsonProperty(value = "Schema-Group-Name")
private String schemaGroupName;
/*
* The Schema-Name property.
*/
@JsonProperty(value = "Schema-Name")
private String schemaName;
/*
* The Schema-Id-Location property.
*/
@JsonProperty(value = "Schema-Id-Location")
private String schemaIdLocation;
/*
* The Location property.
*/
@JsonProperty(value = "Location")
private String location;
/*
* The Content-Type property.
*/
@JsonProperty(value = "Content-Type")
private String contentType;
private static final HttpHeaderName SCHEMA_VERSION = HttpHeaderName.fromString("Schema-Version");
private static final HttpHeaderName SCHEMA_ID = HttpHeaderName.fromString("Schema-Id");
private static final HttpHeaderName SCHEMA_GROUP_NAME = HttpHeaderName.fromString("Schema-Group-Name");
private static final HttpHeaderName SCHEMA_NAME = HttpHeaderName.fromString("Schema-Name");
private static final HttpHeaderName SCHEMA_ID_LOCATION = HttpHeaderName.fromString("Schema-Id-Location");
/**
* Creates an instance of SchemasGetByIdHeaders class.
*
* @param rawHeaders The raw HttpHeaders that will be used to create the property values.
*/
/**
* Get the schemaVersion property: The Schema-Version property.
*
* @return the schemaVersion value.
*/
public Integer getSchemaVersion() {
return this.schemaVersion;
}
/**
* Set the schemaVersion property: The Schema-Version property.
*
* @param schemaVersion the schemaVersion value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaVersion(Integer schemaVersion) {
this.schemaVersion = schemaVersion;
return this;
}
/**
* Get the schemaId property: The Schema-Id property.
*
* @return the schemaId value.
*/
public String getSchemaId() {
return this.schemaId;
}
/**
* Set the schemaId property: The Schema-Id property.
*
* @param schemaId the schemaId value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaId(String schemaId) {
this.schemaId = schemaId;
return this;
}
/**
* Get the schemaGroupName property: The Schema-Group-Name property.
*
* @return the schemaGroupName value.
*/
public String getSchemaGroupName() {
return this.schemaGroupName;
}
/**
* Set the schemaGroupName property: The Schema-Group-Name property.
*
* @param schemaGroupName the schemaGroupName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaGroupName(String schemaGroupName) {
this.schemaGroupName = schemaGroupName;
return this;
}
/**
* Get the schemaName property: The Schema-Name property.
*
* @return the schemaName value.
*/
public String getSchemaName() {
return this.schemaName;
}
/**
* Set the schemaName property: The Schema-Name property.
*
* @param schemaName the schemaName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
}
/**
* Get the schemaIdLocation property: The Schema-Id-Location property.
*
* @return the schemaIdLocation value.
*/
public String getSchemaIdLocation() {
return this.schemaIdLocation;
}
/**
* Set the schemaIdLocation property: The Schema-Id-Location property.
*
* @param schemaIdLocation the schemaIdLocation value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaIdLocation(String schemaIdLocation) {
this.schemaIdLocation = schemaIdLocation;
return this;
}
/**
* Get the location property: The Location property.
*
* @return the location value.
*/
public String getLocation() {
return this.location;
}
/**
* Set the location property: The Location property.
*
* @param location the location value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setLocation(String location) {
this.location = location;
return this;
}
/**
* Get the contentType property: The Content-Type property.
*
* @return the contentType value.
*/
public String getContentType() {
return this.contentType;
}
/**
* Set the contentType property: The Content-Type property.
*
* @param contentType the contentType value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setContentType(String contentType) {
this.contentType = contentType;
return this;
}
} | class SchemasGetByIdHeaders {
/*
* The Schema-Version property.
*/
@JsonProperty(value = "Schema-Version")
private Integer schemaVersion;
/*
* The Schema-Id property.
*/
@JsonProperty(value = "Schema-Id")
private String schemaId;
/*
* The Schema-Group-Name property.
*/
@JsonProperty(value = "Schema-Group-Name")
private String schemaGroupName;
/*
* The Schema-Name property.
*/
@JsonProperty(value = "Schema-Name")
private String schemaName;
/*
* The Schema-Id-Location property.
*/
@JsonProperty(value = "Schema-Id-Location")
private String schemaIdLocation;
/*
* The Location property.
*/
@JsonProperty(value = "Location")
private String location;
/*
* The Content-Type property.
*/
@JsonProperty(value = "Content-Type")
private SchemaFormat contentType;
private static final HttpHeaderName SCHEMA_VERSION = HttpHeaderName.fromString("Schema-Version");
private static final HttpHeaderName SCHEMA_ID = HttpHeaderName.fromString("Schema-Id");
private static final HttpHeaderName SCHEMA_GROUP_NAME = HttpHeaderName.fromString("Schema-Group-Name");
private static final HttpHeaderName SCHEMA_NAME = HttpHeaderName.fromString("Schema-Name");
private static final HttpHeaderName SCHEMA_ID_LOCATION = HttpHeaderName.fromString("Schema-Id-Location");
/**
* Creates an instance of SchemasGetByIdHeaders class.
*
* @param rawHeaders The raw HttpHeaders that will be used to create the property values.
*/
/**
* Get the schemaVersion property: The Schema-Version property.
*
* @return the schemaVersion value.
*/
public Integer getSchemaVersion() {
return this.schemaVersion;
}
/**
* Set the schemaVersion property: The Schema-Version property.
*
* @param schemaVersion the schemaVersion value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaVersion(Integer schemaVersion) {
this.schemaVersion = schemaVersion;
return this;
}
/**
* Get the schemaId property: The Schema-Id property.
*
* @return the schemaId value.
*/
public String getSchemaId() {
return this.schemaId;
}
/**
* Set the schemaId property: The Schema-Id property.
*
* @param schemaId the schemaId value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaId(String schemaId) {
this.schemaId = schemaId;
return this;
}
/**
* Get the schemaGroupName property: The Schema-Group-Name property.
*
* @return the schemaGroupName value.
*/
public String getSchemaGroupName() {
return this.schemaGroupName;
}
/**
* Set the schemaGroupName property: The Schema-Group-Name property.
*
* @param schemaGroupName the schemaGroupName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaGroupName(String schemaGroupName) {
this.schemaGroupName = schemaGroupName;
return this;
}
/**
* Get the schemaName property: The Schema-Name property.
*
* @return the schemaName value.
*/
public String getSchemaName() {
return this.schemaName;
}
/**
* Set the schemaName property: The Schema-Name property.
*
* @param schemaName the schemaName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
}
/**
* Get the schemaIdLocation property: The Schema-Id-Location property.
*
* @return the schemaIdLocation value.
*/
public String getSchemaIdLocation() {
return this.schemaIdLocation;
}
/**
* Set the schemaIdLocation property: The Schema-Id-Location property.
*
* @param schemaIdLocation the schemaIdLocation value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaIdLocation(String schemaIdLocation) {
this.schemaIdLocation = schemaIdLocation;
return this;
}
/**
* Get the location property: The Location property.
*
* @return the location value.
*/
public String getLocation() {
return this.location;
}
/**
* Set the location property: The Location property.
*
* @param location the location value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setLocation(String location) {
this.location = location;
return this;
}
/**
* Get the contentType property: The Content-Type property.
*
* @return the contentType value.
*/
public SchemaFormat getContentType() {
return this.contentType;
}
/**
* Set the contentType property: The Content-Type property.
*
* @param contentType the contentType value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setContentType(SchemaFormat contentType) {
this.contentType = contentType;
return this;
}
} |
This seems to be changed by Autorest generation. We can hand revert it with a note here. Open to other ideas. | public SchemasGetByIdHeaders(HttpHeaders rawHeaders) {
String schemaVersion = rawHeaders.getValue(SCHEMA_VERSION);
if (schemaVersion != null) {
this.schemaVersion = Integer.parseInt(schemaVersion);
}
this.schemaId = rawHeaders.getValue(SCHEMA_ID);
this.schemaGroupName = rawHeaders.getValue(SCHEMA_GROUP_NAME);
this.schemaName = rawHeaders.getValue(SCHEMA_NAME);
this.schemaIdLocation = rawHeaders.getValue(SCHEMA_ID_LOCATION);
this.location = rawHeaders.getValue(HttpHeaderName.LOCATION);
this.contentType = rawHeaders.getValue(HttpHeaderName.CONTENT_TYPE);
} | } | public SchemasGetByIdHeaders(HttpHeaders rawHeaders) {
String schemaVersion = rawHeaders.getValue(SCHEMA_VERSION);
if (schemaVersion != null) {
this.schemaVersion = Integer.parseInt(schemaVersion);
}
this.schemaId = rawHeaders.getValue(SCHEMA_ID);
this.schemaGroupName = rawHeaders.getValue(SCHEMA_GROUP_NAME);
this.schemaName = rawHeaders.getValue(SCHEMA_NAME);
this.schemaIdLocation = rawHeaders.getValue(SCHEMA_ID_LOCATION);
this.location = rawHeaders.getValue(HttpHeaderName.LOCATION);
String contentType = rawHeaders.getValue(HttpHeaderName.CONTENT_TYPE);
if (contentType != null) {
this.contentType = SchemaFormat.fromString(contentType);
}
} | class SchemasGetByIdHeaders {
/*
* The Schema-Version property.
*/
@JsonProperty(value = "Schema-Version")
private Integer schemaVersion;
/*
* The Schema-Id property.
*/
@JsonProperty(value = "Schema-Id")
private String schemaId;
/*
* The Schema-Group-Name property.
*/
@JsonProperty(value = "Schema-Group-Name")
private String schemaGroupName;
/*
* The Schema-Name property.
*/
@JsonProperty(value = "Schema-Name")
private String schemaName;
/*
* The Schema-Id-Location property.
*/
@JsonProperty(value = "Schema-Id-Location")
private String schemaIdLocation;
/*
* The Location property.
*/
@JsonProperty(value = "Location")
private String location;
/*
* The Content-Type property.
*/
@JsonProperty(value = "Content-Type")
private String contentType;
private static final HttpHeaderName SCHEMA_VERSION = HttpHeaderName.fromString("Schema-Version");
private static final HttpHeaderName SCHEMA_ID = HttpHeaderName.fromString("Schema-Id");
private static final HttpHeaderName SCHEMA_GROUP_NAME = HttpHeaderName.fromString("Schema-Group-Name");
private static final HttpHeaderName SCHEMA_NAME = HttpHeaderName.fromString("Schema-Name");
private static final HttpHeaderName SCHEMA_ID_LOCATION = HttpHeaderName.fromString("Schema-Id-Location");
/**
* Creates an instance of SchemasGetByIdHeaders class.
*
* @param rawHeaders The raw HttpHeaders that will be used to create the property values.
*/
/**
* Get the schemaVersion property: The Schema-Version property.
*
* @return the schemaVersion value.
*/
public Integer getSchemaVersion() {
return this.schemaVersion;
}
/**
* Set the schemaVersion property: The Schema-Version property.
*
* @param schemaVersion the schemaVersion value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaVersion(Integer schemaVersion) {
this.schemaVersion = schemaVersion;
return this;
}
/**
* Get the schemaId property: The Schema-Id property.
*
* @return the schemaId value.
*/
public String getSchemaId() {
return this.schemaId;
}
/**
* Set the schemaId property: The Schema-Id property.
*
* @param schemaId the schemaId value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaId(String schemaId) {
this.schemaId = schemaId;
return this;
}
/**
* Get the schemaGroupName property: The Schema-Group-Name property.
*
* @return the schemaGroupName value.
*/
public String getSchemaGroupName() {
return this.schemaGroupName;
}
/**
* Set the schemaGroupName property: The Schema-Group-Name property.
*
* @param schemaGroupName the schemaGroupName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaGroupName(String schemaGroupName) {
this.schemaGroupName = schemaGroupName;
return this;
}
/**
* Get the schemaName property: The Schema-Name property.
*
* @return the schemaName value.
*/
public String getSchemaName() {
return this.schemaName;
}
/**
* Set the schemaName property: The Schema-Name property.
*
* @param schemaName the schemaName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
}
/**
* Get the schemaIdLocation property: The Schema-Id-Location property.
*
* @return the schemaIdLocation value.
*/
public String getSchemaIdLocation() {
return this.schemaIdLocation;
}
/**
* Set the schemaIdLocation property: The Schema-Id-Location property.
*
* @param schemaIdLocation the schemaIdLocation value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaIdLocation(String schemaIdLocation) {
this.schemaIdLocation = schemaIdLocation;
return this;
}
/**
* Get the location property: The Location property.
*
* @return the location value.
*/
public String getLocation() {
return this.location;
}
/**
* Set the location property: The Location property.
*
* @param location the location value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setLocation(String location) {
this.location = location;
return this;
}
/**
* Get the contentType property: The Content-Type property.
*
* @return the contentType value.
*/
public String getContentType() {
return this.contentType;
}
/**
* Set the contentType property: The Content-Type property.
*
* @param contentType the contentType value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setContentType(String contentType) {
this.contentType = contentType;
return this;
}
} | class SchemasGetByIdHeaders {
    /*
     * The Schema-Version property.
     */
    @JsonProperty(value = "Schema-Version")
    private Integer schemaVersion;

    /*
     * The Schema-Id property.
     */
    @JsonProperty(value = "Schema-Id")
    private String schemaId;

    /*
     * The Schema-Group-Name property.
     */
    @JsonProperty(value = "Schema-Group-Name")
    private String schemaGroupName;

    /*
     * The Schema-Name property.
     */
    @JsonProperty(value = "Schema-Name")
    private String schemaName;

    /*
     * The Schema-Id-Location property.
     */
    @JsonProperty(value = "Schema-Id-Location")
    private String schemaIdLocation;

    /*
     * The Location property.
     */
    @JsonProperty(value = "Location")
    private String location;

    /*
     * The Content-Type property.
     */
    @JsonProperty(value = "Content-Type")
    private SchemaFormat contentType;

    // Cached header-name constants so response-header lookups do not re-allocate
    // HttpHeaderName instances on every request.
    // NOTE(review): nothing in the visible class reads these constants - presumably a
    // rawHeaders-based constructor was supposed to use them; see the dangling Javadoc below.
    private static final HttpHeaderName SCHEMA_VERSION = HttpHeaderName.fromString("Schema-Version");
    private static final HttpHeaderName SCHEMA_ID = HttpHeaderName.fromString("Schema-Id");
    private static final HttpHeaderName SCHEMA_GROUP_NAME = HttpHeaderName.fromString("Schema-Group-Name");
    private static final HttpHeaderName SCHEMA_NAME = HttpHeaderName.fromString("Schema-Name");
    private static final HttpHeaderName SCHEMA_ID_LOCATION = HttpHeaderName.fromString("Schema-Id-Location");
/**
* Creates an instance of SchemasGetByIdHeaders class.
*
* @param rawHeaders The raw HttpHeaders that will be used to create the property values.
*/
/**
* Get the schemaVersion property: The Schema-Version property.
*
* @return the schemaVersion value.
*/
public Integer getSchemaVersion() {
return this.schemaVersion;
}
/**
* Set the schemaVersion property: The Schema-Version property.
*
* @param schemaVersion the schemaVersion value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaVersion(Integer schemaVersion) {
this.schemaVersion = schemaVersion;
return this;
}
/**
* Get the schemaId property: The Schema-Id property.
*
* @return the schemaId value.
*/
public String getSchemaId() {
return this.schemaId;
}
/**
* Set the schemaId property: The Schema-Id property.
*
* @param schemaId the schemaId value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaId(String schemaId) {
this.schemaId = schemaId;
return this;
}
/**
* Get the schemaGroupName property: The Schema-Group-Name property.
*
* @return the schemaGroupName value.
*/
public String getSchemaGroupName() {
return this.schemaGroupName;
}
/**
* Set the schemaGroupName property: The Schema-Group-Name property.
*
* @param schemaGroupName the schemaGroupName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaGroupName(String schemaGroupName) {
this.schemaGroupName = schemaGroupName;
return this;
}
/**
* Get the schemaName property: The Schema-Name property.
*
* @return the schemaName value.
*/
public String getSchemaName() {
return this.schemaName;
}
/**
* Set the schemaName property: The Schema-Name property.
*
* @param schemaName the schemaName value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaName(String schemaName) {
this.schemaName = schemaName;
return this;
}
/**
* Get the schemaIdLocation property: The Schema-Id-Location property.
*
* @return the schemaIdLocation value.
*/
public String getSchemaIdLocation() {
return this.schemaIdLocation;
}
/**
* Set the schemaIdLocation property: The Schema-Id-Location property.
*
* @param schemaIdLocation the schemaIdLocation value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setSchemaIdLocation(String schemaIdLocation) {
this.schemaIdLocation = schemaIdLocation;
return this;
}
/**
* Get the location property: The Location property.
*
* @return the location value.
*/
public String getLocation() {
return this.location;
}
/**
* Set the location property: The Location property.
*
* @param location the location value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setLocation(String location) {
this.location = location;
return this;
}
/**
* Get the contentType property: The Content-Type property.
*
* @return the contentType value.
*/
public SchemaFormat getContentType() {
return this.contentType;
}
/**
* Set the contentType property: The Content-Type property.
*
* @param contentType the contentType value to set.
* @return the SchemasGetByIdHeaders object itself.
*/
public SchemasGetByIdHeaders setContentType(SchemaFormat contentType) {
this.contentType = contentType;
return this;
}
} |
We can just use `Mono.just(builder.toString()`. `Mono.defer()` is not needed here. | static Mono<String> convertToString(Flux<ByteBuffer> byteBufferFlux) {
final StringBuilder builder = new StringBuilder();
return byteBufferFlux
.map(byteBuffer -> {
builder.append(new String(byteBuffer.array(), StandardCharsets.UTF_8));
return Mono.empty();
}).then(Mono.defer(() -> Mono.just(builder.toString())));
} | }).then(Mono.defer(() -> Mono.just(builder.toString()))); | static Mono<String> convertToString(Flux<ByteBuffer> byteBufferFlux) {
final StringBuilder builder = new StringBuilder();
return byteBufferFlux
.map(byteBuffer -> {
builder.append(new String(byteBuffer.array(), StandardCharsets.UTF_8));
return Mono.empty();
}).then(Mono.defer(() -> Mono.just(builder.toString())));
} | class SchemaRegistryAsyncClient {
    // Logger scoped to this client type.
    private final ClientLogger logger = new ClientLogger(SchemaRegistryAsyncClient.class);
    // Generated service layer that performs the actual REST calls.
    private final AzureSchemaRegistryImpl restService;

    /**
     * Creates a client that delegates all service calls to the given generated REST client.
     *
     * @param restService The generated Schema Registry REST client to delegate to.
     */
    SchemaRegistryAsyncClient(AzureSchemaRegistryImpl restService) {
        this.restService = restService;
        // NOTE(review): the created instance is discarded - presumably this only forces
        // SchemaProperties' static initializer to run (a common accessor-registration
        // pattern in this SDK). Confirm before removing.
        new SchemaProperties("", SchemaFormat.AVRO);
    }
/**
* Gets the fully qualified namespace of the Schema Registry instance.
*
* @return The fully qualified namespace of the Schema Registry instance.
*/
public String getFullyQualifiedNamespace() {
return this.restService.getEndpoint();
}
/**
* Registers a new schema in the specified schema group with the given schema name. If a schema
* <b>does not exist</b>does not exist with the same {@code groupName}, {@code name}, {@code format}, and
* {@code schemaDefinition}, it is added to the Schema Registry Instance and assigned a schema id. If a schema
* exists with a matching {@code groupName}, {@code name}, {@code format}, and {@code schemaDefinition}, the id of
* that schema is returned. If the Schema Registry instance contains an existing {@code groupName}, {@code name},
* and {@code format} but the {@code schemaDefinition} is different, it is considered a new version, and schema id
* is assigned to it.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return The {@link SchemaProperties} of a successfully registered schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code format}, or {@code schemaDefinition}
* are null.
* @throws HttpResponseException if an issue was encountered while registering the schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaProperties> registerSchema(String groupName, String name, String schemaDefinition,
SchemaFormat format) {
return registerSchemaWithResponse(groupName, name, schemaDefinition, format)
.map(Response::getValue);
}
/**
* Registers a new schema in the specified schema group with the given schema name. If a schema
* <b>does not exist</b>does not exist with the same {@code groupName}, {@code name}, {@code format}, and
* {@code schemaDefinition}, it is added to the Schema Registry Instance and assigned a schema id. If a schema
* exists with a matching {@code groupName}, {@code name}, {@code format}, and {@code schemaDefinition}, the id of
* that schema is returned. If the Schema Registry instance contains an existing {@code groupName}, {@code name},
* and {@code format} but the {@code schemaDefinition} is different, it is considered a new version, and schema id
* is assigned to it.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return The schema properties on successful registration of the schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code format}, or {@code schemaDefinition}
* are null.
* @throws HttpResponseException if an issue was encountered while registering the schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaProperties>> registerSchemaWithResponse(String groupName, String name,
String schemaDefinition, SchemaFormat format) {
return FluxUtil.withContext(context -> registerSchemaWithResponse(groupName, name, schemaDefinition,
format, context));
}
    /**
     * Internal implementation of schema registration, invoked with the captured reactor {@link Context}.
     *
     * @param groupName The schema group.
     * @param name The schema name.
     * @param schemaDefinition The string representation of the schema.
     * @param format The serialization type of this schema.
     * @param context Context to pass along with this request.
     *
     * @return The registered schema's properties along with the HTTP response.
     */
    Mono<Response<SchemaProperties>> registerSchemaWithResponse(String groupName, String name, String schemaDefinition,
        SchemaFormat format, Context context) {
        // Fail fast on null arguments; monoError logs and surfaces the error through the
        // returned Mono instead of throwing synchronously.
        if (Objects.isNull(groupName)) {
            return monoError(logger, new NullPointerException("'groupName' should not be null."));
        } else if (Objects.isNull(name)) {
            return monoError(logger, new NullPointerException("'name' should not be null."));
        } else if (Objects.isNull(schemaDefinition)) {
            return monoError(logger, new NullPointerException("'schemaDefinition' should not be null."));
        } else if (Objects.isNull(format)) {
            return monoError(logger, new NullPointerException("'format' should not be null."));
        }
        // NOTE(review): this logs the full schema payload at verbose level - confirm that
        // schema definitions are never sensitive.
        logger.verbose("Registering schema. Group: '{}', name: '{}', serialization type: '{}', payload: '{}'",
            groupName, name, format, schemaDefinition);
        final BinaryData binaryData = BinaryData.fromString(schemaDefinition);
        return restService.getSchemas().registerWithResponseAsync(groupName, name, binaryData, binaryData.getLength(),
            context)
            .map(response -> {
                // Translate the generated-layer response headers into the public model.
                final SchemaProperties registered = SchemaRegistryHelper.getSchemaPropertiesFromSchemaRegisterHeaders(response);
                return new SimpleResponse<>(
                    response.getRequest(), response.getStatusCode(),
                    response.getHeaders(), registered);
            });
    }
/**
* Gets the schema properties of the schema associated with the unique schema id.
*
* @param schemaId The unique identifier of the schema.
*
* @return The {@link SchemaRegistrySchema} associated with the given {@code schemaId}.
*
* @throws NullPointerException if {@code schemaId} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code schemaId} could not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaRegistrySchema> getSchema(String schemaId) {
return getSchemaWithResponse(schemaId).map(Response::getValue);
}
/**
* Gets the schema properties of the schema associated with the group name, schema name, and schema version.
*
* @param groupName Group name for the schema
* @param schemaName Name of the schema
* @param schemaVersion Version of schema
*
* @return The {@link SchemaRegistrySchema} matching the parameters.
*
* @throws NullPointerException if {@code groupName} or {@code schemaName} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code groupName} or {@code schemaName} could
* not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaRegistrySchema> getSchema(String groupName, String schemaName, int schemaVersion) {
return getSchemaWithResponse(groupName, schemaName, schemaVersion).map(Response::getValue);
}
/**
* Gets the schema properties of the schema associated with the unique schema id.
*
* @param schemaId The unique identifier of the schema.
*
* @return The {@link SchemaRegistrySchema} associated with the given {@code schemaId} along with the HTTP response.
*
* @throws NullPointerException if {@code schemaId} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code schemaId} could not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String schemaId) {
return FluxUtil.withContext(context -> getSchemaWithResponse(schemaId, context));
}
/**
* Gets the schema properties of the schema associated with the group name, schema name, and schema version.
*
* @param groupName Group name for the schema
* @param schemaName Name of the schema
* @param schemaVersion Version of schema
*
* @return The {@link SchemaRegistrySchema} matching the parameters.
*
* @throws NullPointerException if {@code groupName} or {@code schemaName} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code groupName} or {@code schemaName} could
* not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String groupName, String schemaName,
int schemaVersion) {
return FluxUtil.withContext(context -> getSchemaWithResponse(groupName, schemaName, schemaVersion, context));
}
    /**
     * Internal implementation of {@link #getSchemaWithResponse(String)}, invoked with the captured reactor
     * {@link Context}.
     *
     * @param schemaId The unique identifier of the schema.
     * @param context Context to pass along with this request.
     *
     * @return The schema and its properties along with the HTTP response.
     */
    Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String schemaId, Context context) {
        if (Objects.isNull(schemaId)) {
            return monoError(logger, new NullPointerException("'schemaId' should not be null."));
        }
        return this.restService.getSchemas().getByIdWithResponseAsync(schemaId, context)
            // Remap generic service errors (e.g. 404) to the azure-core exception types
            // documented on the public overloads.
            .onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
            // handle(...) is used (instead of map) so the UncheckedIOException thrown by
            // convertToString can be routed into the reactive error channel.
            .handle((response, sink) -> {
                final SchemaProperties schemaObject = SchemaRegistryHelper.getSchemaPropertiesFromSchemasGetByIdHeaders(response);
                final String schema;
                try {
                    schema = convertToString(response.getValue().toStream());
                } catch (UncheckedIOException e) {
                    sink.error(e);
                    return;
                }
                sink.next(new SimpleResponse<>(
                    response.getRequest(), response.getStatusCode(),
                    response.getHeaders(), new SchemaRegistrySchema(schemaObject, schema)));
                sink.complete();
            });
    }
Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String groupName, String schemaName, int schemaVersion,
Context context) {
if (Objects.isNull(groupName)) {
return monoError(logger, new NullPointerException("'groupName' should not be null."));
}
return this.restService.getSchemas().getSchemaVersionWithResponseAsync(groupName, schemaName, schemaVersion,
context)
.onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
.flatMap(response -> {
final Flux<ByteBuffer> schemaFlux = response.getValue().toFluxByteBuffer();
final SchemaProperties schemaObject = SchemaRegistryHelper.getSchemaPropertiesFromSchemasGetSchemaVersionHeaders(response);
if (schemaFlux == null) {
return Mono.error(new IllegalArgumentException(String.format(
"Schema definition should not be null. Group Name: %s. Schema Name: %s. Version: %d",
groupName, schemaName, schemaVersion)));
}
return convertToString(schemaFlux)
.map(schema -> new SimpleResponse<>(
response.getRequest(), response.getStatusCode(),
response.getHeaders(), new SchemaRegistrySchema(schemaObject, schema)));
});
}
/**
* Gets the schema identifier associated with the given schema. Gets a cached value if it exists, otherwise makes a
* call to the service.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return A mono that completes with the properties for a matching schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
* is null.
* @throws ResourceNotFoundException if a schema with matching parameters could not be located.
* @throws HttpResponseException if an issue was encountered while finding a matching schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaProperties> getSchemaProperties(String groupName, String name, String schemaDefinition,
SchemaFormat format) {
return getSchemaPropertiesWithResponse(groupName, name, schemaDefinition, format)
.map(response -> response.getValue());
}
/**
* Gets the schema identifier associated with the given schema. Always makes a call to the service.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return A mono that completes with the properties for a matching schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
* is null.
* @throws ResourceNotFoundException if a schema with matching parameters could not be located.
* @throws HttpResponseException if an issue was encountered while finding a matching schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaProperties>> getSchemaPropertiesWithResponse(String groupName, String name,
String schemaDefinition, SchemaFormat format) {
return FluxUtil.withContext(context ->
getSchemaPropertiesWithResponse(groupName, name, schemaDefinition, format, context));
}
    /**
     * Gets the schema id associated with the schema name a string representation of the schema.
     *
     * @param groupName The schema group.
     * @param name The schema name.
     * @param schemaDefinition The string representation of the schema.
     * @param format The serialization type of this schema.
     * @param context Context to pass along with this request.
     *
     * @return A mono that completes with the properties for a matching schema.
     *
     * @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
     * is null.
     * @throws ResourceNotFoundException if a schema with matching parameters could not be located.
     * @throws HttpResponseException if an issue was encountered while finding a matching schema.
     */
    Mono<Response<SchemaProperties>> getSchemaPropertiesWithResponse(String groupName, String name,
        String schemaDefinition, SchemaFormat format, Context context) {
        // Fail fast on null arguments, surfacing the error through the returned Mono.
        if (Objects.isNull(groupName)) {
            return monoError(logger, new NullPointerException("'groupName' cannot be null."));
        } else if (Objects.isNull(name)) {
            return monoError(logger, new NullPointerException("'name' cannot be null."));
        } else if (Objects.isNull(schemaDefinition)) {
            return monoError(logger, new NullPointerException("'schemaDefinition' cannot be null."));
        } else if (Objects.isNull(format)) {
            return monoError(logger, new NullPointerException("'format' cannot be null."));
        }
        // Unlike the other internal methods this one may be called with a null context;
        // normalize it so the generated layer always receives a non-null Context.
        if (context == null) {
            context = Context.NONE;
        }
        final BinaryData binaryData = BinaryData.fromString(schemaDefinition);
        return restService.getSchemas()
            .queryIdByContentWithResponseAsync(groupName, name, binaryData, binaryData.getLength(), context)
            // Remap generic service errors (e.g. 404) to the documented azure-core types.
            .onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
            .map(response -> {
                final SchemaProperties properties = SchemaRegistryHelper.getSchemaPropertiesFromSchemasQueryIdByContentHeaders(response);
                return new SimpleResponse<>(
                    response.getRequest(), response.getStatusCode(),
                    response.getHeaders(), properties);
            });
    }
/**
* Remaps a generic ErrorException to more specific HTTP exceptions.
*
* @param error Error to map.
*
* @return The remapped error.
*/
static HttpResponseException remapError(ErrorException error) {
if (error.getResponse().getStatusCode() == 404) {
final String message;
if (error.getValue() != null && error.getValue().getError() != null) {
message = error.getValue().getError().getMessage();
} else {
message = error.getMessage();
}
return new ResourceNotFoundException(message, error.getResponse(), error);
}
return error;
}
/**
* Converts an input stream into its string representation.
*
* @param inputStream Input stream.
*
* @return A string representation.
*
* @throws UncheckedIOException if an {@link IOException} is thrown when creating the readers.
*/
static String convertToString(InputStream inputStream) {
final StringBuilder builder = new StringBuilder();
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
String str;
while ((str = reader.readLine()) != null) {
builder.append(str);
}
} catch (IOException exception) {
throw new UncheckedIOException("Error occurred while deserializing schemaContent.", exception);
}
return builder.toString();
}
/**
* Converts a Flux of Byte Buffer into its string representation.
*
* @param byteBufferFlux the Byte Buffer Flux input.
*
* @return A string representation.
*
*/
} | class SchemaRegistryAsyncClient {
    // Logger scoped to this client type.
    private final ClientLogger logger = new ClientLogger(SchemaRegistryAsyncClient.class);
    // Generated service layer that performs the actual REST calls.
    private final AzureSchemaRegistryImpl restService;

    /**
     * Creates a client that delegates all service calls to the given generated REST client.
     *
     * @param restService The generated Schema Registry REST client to delegate to.
     */
    SchemaRegistryAsyncClient(AzureSchemaRegistryImpl restService) {
        this.restService = restService;
        // NOTE(review): the created instance is discarded - presumably this only forces
        // SchemaProperties' static initializer to run (a common accessor-registration
        // pattern in this SDK). Confirm before removing.
        new SchemaProperties("", SchemaFormat.AVRO);
    }
/**
* Gets the fully qualified namespace of the Schema Registry instance.
*
* @return The fully qualified namespace of the Schema Registry instance.
*/
public String getFullyQualifiedNamespace() {
return this.restService.getEndpoint();
}
/**
* Registers a new schema in the specified schema group with the given schema name. If a schema
* <b>does not exist</b>does not exist with the same {@code groupName}, {@code name}, {@code format}, and
* {@code schemaDefinition}, it is added to the Schema Registry Instance and assigned a schema id. If a schema
* exists with a matching {@code groupName}, {@code name}, {@code format}, and {@code schemaDefinition}, the id of
* that schema is returned. If the Schema Registry instance contains an existing {@code groupName}, {@code name},
* and {@code format} but the {@code schemaDefinition} is different, it is considered a new version, and schema id
* is assigned to it.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return The {@link SchemaProperties} of a successfully registered schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code format}, or {@code schemaDefinition}
* are null.
* @throws HttpResponseException if an issue was encountered while registering the schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaProperties> registerSchema(String groupName, String name, String schemaDefinition,
SchemaFormat format) {
return registerSchemaWithResponse(groupName, name, schemaDefinition, format)
.map(Response::getValue);
}
/**
* Registers a new schema in the specified schema group with the given schema name. If a schema
* <b>does not exist</b>does not exist with the same {@code groupName}, {@code name}, {@code format}, and
* {@code schemaDefinition}, it is added to the Schema Registry Instance and assigned a schema id. If a schema
* exists with a matching {@code groupName}, {@code name}, {@code format}, and {@code schemaDefinition}, the id of
* that schema is returned. If the Schema Registry instance contains an existing {@code groupName}, {@code name},
* and {@code format} but the {@code schemaDefinition} is different, it is considered a new version, and schema id
* is assigned to it.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return The schema properties on successful registration of the schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code format}, or {@code schemaDefinition}
* are null.
* @throws HttpResponseException if an issue was encountered while registering the schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaProperties>> registerSchemaWithResponse(String groupName, String name,
String schemaDefinition, SchemaFormat format) {
return FluxUtil.withContext(context -> registerSchemaWithResponse(groupName, name, schemaDefinition,
format, context));
}
    /**
     * Internal implementation of schema registration, invoked with the captured reactor {@link Context}.
     *
     * @param groupName The schema group.
     * @param name The schema name.
     * @param schemaDefinition The string representation of the schema.
     * @param format The serialization type of this schema.
     * @param context Context to pass along with this request.
     *
     * @return The registered schema's properties along with the HTTP response.
     */
    Mono<Response<SchemaProperties>> registerSchemaWithResponse(String groupName, String name, String schemaDefinition,
        SchemaFormat format, Context context) {
        // Fail fast on null arguments; monoError logs and surfaces the error through the
        // returned Mono instead of throwing synchronously.
        if (Objects.isNull(groupName)) {
            return monoError(logger, new NullPointerException("'groupName' should not be null."));
        } else if (Objects.isNull(name)) {
            return monoError(logger, new NullPointerException("'name' should not be null."));
        } else if (Objects.isNull(schemaDefinition)) {
            return monoError(logger, new NullPointerException("'schemaDefinition' should not be null."));
        } else if (Objects.isNull(format)) {
            return monoError(logger, new NullPointerException("'format' should not be null."));
        }
        // NOTE(review): this logs the full schema payload at verbose level - confirm that
        // schema definitions are never sensitive.
        logger.verbose("Registering schema. Group: '{}', name: '{}', serialization type: '{}', payload: '{}'",
            groupName, name, format, schemaDefinition);
        final BinaryData binaryData = BinaryData.fromString(schemaDefinition);
        // Translate the public SchemaFormat into the wire-level content type string.
        final SchemaFormatImpl contentType = SchemaRegistryHelper.getContentType(format);
        return restService.getSchemas().registerWithResponseAsync(groupName, name, contentType.toString(), binaryData,
            binaryData.getLength(), context)
            .map(response -> {
                // Translate the generated-layer response headers into the public model.
                final SchemaProperties registered = SchemaRegistryHelper.getSchemaProperties(response.getDeserializedHeaders(), response.getHeaders(), format);
                return new SimpleResponse<>(
                    response.getRequest(), response.getStatusCode(),
                    response.getHeaders(), registered);
            });
    }
/**
* Gets the schema properties of the schema associated with the unique schema id.
*
* @param schemaId The unique identifier of the schema.
*
* @return The {@link SchemaRegistrySchema} associated with the given {@code schemaId}.
*
* @throws NullPointerException if {@code schemaId} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code schemaId} could not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaRegistrySchema> getSchema(String schemaId) {
return getSchemaWithResponse(schemaId).map(Response::getValue);
}
/**
* Gets the schema properties of the schema associated with the group name, schema name, and schema version.
*
* @param groupName Group name for the schema
* @param schemaName Name of the schema
* @param schemaVersion Version of schema
*
* @return The {@link SchemaRegistrySchema} matching the parameters.
*
* @throws NullPointerException if {@code groupName} or {@code schemaName} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code groupName} or {@code schemaName} could
* not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaRegistrySchema> getSchema(String groupName, String schemaName, int schemaVersion) {
return getSchemaWithResponse(groupName, schemaName, schemaVersion).map(Response::getValue);
}
/**
* Gets the schema properties of the schema associated with the unique schema id.
*
* @param schemaId The unique identifier of the schema.
*
* @return The {@link SchemaRegistrySchema} associated with the given {@code schemaId} along with the HTTP response.
*
* @throws NullPointerException if {@code schemaId} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code schemaId} could not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String schemaId) {
return FluxUtil.withContext(context -> getSchemaWithResponse(schemaId, context));
}
/**
* Gets the schema properties of the schema associated with the group name, schema name, and schema version.
*
* @param groupName Group name for the schema
* @param schemaName Name of the schema
* @param schemaVersion Version of schema
*
* @return The {@link SchemaRegistrySchema} matching the parameters.
*
* @throws NullPointerException if {@code groupName} or {@code schemaName} is null.
* @throws ResourceNotFoundException if a schema with the matching {@code groupName} or {@code schemaName} could
* not be found.
* @throws HttpResponseException if an issue was encountered while fetching the schema.
* @throws UncheckedIOException if an error occurred while deserializing response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String groupName, String schemaName,
int schemaVersion) {
return FluxUtil.withContext(context -> getSchemaWithResponse(groupName, schemaName, schemaVersion, context));
}
    /**
     * Internal implementation of {@link #getSchemaWithResponse(String)}, invoked with the captured reactor
     * {@link Context}.
     *
     * @param schemaId The unique identifier of the schema.
     * @param context Context to pass along with this request.
     *
     * @return The schema and its properties along with the HTTP response.
     */
    Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String schemaId, Context context) {
        if (Objects.isNull(schemaId)) {
            return monoError(logger, new NullPointerException("'schemaId' should not be null."));
        }
        return this.restService.getSchemas().getByIdWithResponseAsync(schemaId, context)
            // Remap generic service errors (e.g. 404) to the azure-core exception types
            // documented on the public overloads.
            .onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
            .flatMap(response -> {
                final SchemaProperties schemaObject = SchemaRegistryHelper.getSchemaProperties(response.getDeserializedHeaders(), response.getHeaders());
                // Aggregate the response body into the schema definition string.
                return convertToString(response.getValue())
                    .map(schema -> new SimpleResponse<>(
                        response.getRequest(), response.getStatusCode(),
                        response.getHeaders(), new SchemaRegistrySchema(schemaObject, schema)));
            });
    }
Mono<Response<SchemaRegistrySchema>> getSchemaWithResponse(String groupName, String schemaName, int schemaVersion,
Context context) {
if (Objects.isNull(groupName)) {
return monoError(logger, new NullPointerException("'groupName' should not be null."));
}
return this.restService.getSchemas().getSchemaVersionWithResponseAsync(groupName, schemaName, schemaVersion,
context)
.onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
.flatMap(response -> {
final Flux<ByteBuffer> schemaFlux = response.getValue();
final SchemaProperties schemaObject = SchemaRegistryHelper.getSchemaProperties(response.getDeserializedHeaders(), response.getHeaders());
if (schemaFlux == null) {
return Mono.error(new IllegalArgumentException(String.format(
"Schema definition should not be null. Group Name: %s. Schema Name: %s. Version: %d",
groupName, schemaName, schemaVersion)));
}
return convertToString(schemaFlux)
.map(schema -> new SimpleResponse<>(
response.getRequest(), response.getStatusCode(),
response.getHeaders(), new SchemaRegistrySchema(schemaObject, schema)));
});
}
/**
* Gets the schema identifier associated with the given schema. Gets a cached value if it exists, otherwise makes a
* call to the service.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return A mono that completes with the properties for a matching schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
* is null.
* @throws ResourceNotFoundException if a schema with matching parameters could not be located.
* @throws HttpResponseException if an issue was encountered while finding a matching schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SchemaProperties> getSchemaProperties(String groupName, String name, String schemaDefinition,
SchemaFormat format) {
return getSchemaPropertiesWithResponse(groupName, name, schemaDefinition, format)
.map(response -> response.getValue());
}
/**
* Gets the schema identifier associated with the given schema. Always makes a call to the service.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
*
* @return A mono that completes with the properties for a matching schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
* is null.
* @throws ResourceNotFoundException if a schema with matching parameters could not be located.
* @throws HttpResponseException if an issue was encountered while finding a matching schema.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<SchemaProperties>> getSchemaPropertiesWithResponse(String groupName, String name,
String schemaDefinition, SchemaFormat format) {
return FluxUtil.withContext(context ->
getSchemaPropertiesWithResponse(groupName, name, schemaDefinition, format, context));
}
/**
* Gets the schema id associated with the schema name a string representation of the schema.
*
* @param groupName The schema group.
* @param name The schema name.
* @param schemaDefinition The string representation of the schema.
* @param format The serialization type of this schema.
* @param context Context to pass along with this request.
*
* @return A mono that completes with the properties for a matching schema.
*
* @throws NullPointerException if {@code groupName}, {@code name}, {@code schemaDefinition}, or {@code format}
* is null.
* @throws ResourceNotFoundException if a schema with matching parameters could not be located.
* @throws HttpResponseException if an issue was encountered while finding a matching schema.
*/
Mono<Response<SchemaProperties>> getSchemaPropertiesWithResponse(String groupName, String name,
String schemaDefinition, SchemaFormat format, Context context) {
if (Objects.isNull(groupName)) {
return monoError(logger, new NullPointerException("'groupName' cannot be null."));
} else if (Objects.isNull(name)) {
return monoError(logger, new NullPointerException("'name' cannot be null."));
} else if (Objects.isNull(schemaDefinition)) {
return monoError(logger, new NullPointerException("'schemaDefinition' cannot be null."));
} else if (Objects.isNull(format)) {
return monoError(logger, new NullPointerException("'format' cannot be null."));
}
if (context == null) {
context = Context.NONE;
}
final BinaryData binaryData = BinaryData.fromString(schemaDefinition);
final SchemaFormatImpl contentType = SchemaRegistryHelper.getContentType(format);
return restService.getSchemas()
.queryIdByContentWithResponseAsync(groupName, name, com.azure.data.schemaregistry.implementation.models.SchemaFormat.fromString(contentType.toString()),
binaryData, binaryData.getLength(),
context)
.onErrorMap(ErrorException.class, SchemaRegistryAsyncClient::remapError)
.map(response -> {
final SchemaProperties properties = SchemaRegistryHelper.getSchemaProperties(response.getDeserializedHeaders(), response.getHeaders(), format);
return new SimpleResponse<>(
response.getRequest(), response.getStatusCode(),
response.getHeaders(), properties);
});
}
/**
* Remaps a generic ErrorException to more specific HTTP exceptions.
*
* @param error Error to map.
*
* @return The remapped error.
*/
static HttpResponseException remapError(ErrorException error) {
if (error.getResponse().getStatusCode() == 404) {
final String message;
if (error.getValue() != null && error.getValue().getError() != null) {
message = error.getValue().getError().getMessage();
} else {
message = error.getMessage();
}
return new ResourceNotFoundException(message, error.getResponse(), error);
}
return error;
}
/**
* Converts a Flux of Byte Buffer into its string representation.
*
* @param byteBufferFlux the Byte Buffer Flux input.
*
* @return A string representation.
*
*/
} |
seems like this idToken isn't used by each test case, we can consider making this instantiated in each test method instead. | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
} | this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims); | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OidcIdToken idToken;
private AadOAuth2UserService aadOAuth2UserService;
private OAuth2AccessToken accessToken;
private Map<String, Object> idTokenClaims = new HashMap<>();
private GraphClient graphClient;
private AadAuthenticationProperties properties;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
assertThatIllegalArgumentException().isThrownBy(() -> this.aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWithCustomAuthorities() {
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken,
new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims)));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1");
}
static class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
public static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
public static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OAuth2AccessToken accessToken;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
assertThatIllegalArgumentException().isThrownBy(() -> aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1");
}
@Test
void loadUserWithCustomAuthorities() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
GraphClient graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
AadAuthenticationProperties properties = new AadAuthenticationProperties();
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
static final class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} |
I think loadUserWithCustomAuthorities() is the special one. Other test cases could leverage the code in setup(). | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
} | this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims); | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OidcIdToken idToken;
private AadOAuth2UserService aadOAuth2UserService;
private OAuth2AccessToken accessToken;
private Map<String, Object> idTokenClaims = new HashMap<>();
private GraphClient graphClient;
private AadAuthenticationProperties properties;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
assertThatIllegalArgumentException().isThrownBy(() -> this.aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWithCustomAuthorities() {
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken,
new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims)));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1");
}
static class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
public static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
public static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OAuth2AccessToken accessToken;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
assertThatIllegalArgumentException().isThrownBy(() -> aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1");
}
@Test
void loadUserWithCustomAuthorities() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
GraphClient graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
AadAuthenticationProperties properties = new AadAuthenticationProperties();
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
static final class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} |
If a variable is not fit for all cases, we should narrow the scope to method. It's okay to have some duplication in the UT. Which will make each test case easy to read. | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
} | this.idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims); | void setup() {
clientRegistrationBuilder = ClientRegistration
.withRegistrationId("registrationId")
.clientName("registrationId")
.authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
.redirectUri("redirectUri")
.userInfoUri(null)
.clientId("cliendId")
.clientSecret("clientSecret")
.userInfoAuthenticationMethod(AuthenticationMethod.HEADER)
.authorizationUri("authorizationUri")
.tokenUri("tokenUri");
this.accessToken = TestOAuth2AccessTokens.scopes(OidcScopes.OPENID, OidcScopes.PROFILE);
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OidcIdToken idToken;
private AadOAuth2UserService aadOAuth2UserService;
private OAuth2AccessToken accessToken;
private Map<String, Object> idTokenClaims = new HashMap<>();
private GraphClient graphClient;
private AadAuthenticationProperties properties;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
assertThatIllegalArgumentException().isThrownBy(() -> this.aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWithCustomAuthorities() {
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken,
new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims)));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
OidcUser user = this.aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, this.idToken));
assertThat(user.getName()).isEqualTo("user1");
}
static class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
public static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
public static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} | class AadOAuth2UserServiceTest {
private ClientRegistration.Builder clientRegistrationBuilder;
private OAuth2AccessToken accessToken;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
@BeforeEach
@Test
void loadUserWhenUserRequestIsNullThenThrowIllegalArgumentException() {
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
assertThatIllegalArgumentException().isThrownBy(() -> aadOAuth2UserService.loadUser(null));
}
@Test
void loadUserFromSession() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ServletRequestAttributes mockAttributes = mock(ServletRequestAttributes.class, RETURNS_DEEP_STUBS);
DefaultOidcUser mockDefaultOidcUser = mock(DefaultOidcUser.class);
HttpSession mockHttpSession = mock(HttpSession.class);
when(mockHttpSession.getAttribute(DEFAULT_OIDC_USER)).thenReturn(mockDefaultOidcUser);
Authentication mockAuthentication = mock(Authentication.class);
when(mockAttributes.getRequest().getSession(true)).thenReturn(mockHttpSession);
RequestContextHolder.setRequestAttributes(mockAttributes);
SecurityContextHolder.getContext().setAuthentication(mockAuthentication);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.loadUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user).isEqualTo(mockDefaultOidcUser);
}
@Test
void loadUserWithDefaultAuthority() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(1);
SimpleGrantedAuthority defaultGrantedAuthority = new SimpleGrantedAuthority(AuthorityPrefix.ROLE + "USER");
assertThat(user.getAuthorities().stream().findFirst().get()).isEqualTo(defaultGrantedAuthority);
}
@Test
void loadUserWhenCustomUserNameAttributeNameThenGetNameReturnsCustomUserName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.userNameAttributeName(StandardClaimNames.EMAIL)
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1@example.com");
}
@Test
void loadUserWithDefaultUserNameAttributeName() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
ClientRegistration clientRegistration = this.clientRegistrationBuilder.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(null, null, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getName()).isEqualTo("user1");
}
@Test
void loadUserWithCustomAuthorities() {
Map<String, Object> idTokenClaims = new HashMap<>();
idTokenClaims.put(IdTokenClaimNames.ISS, "https:
idTokenClaims.put(IdTokenClaimNames.SUB, "subject1");
idTokenClaims.put(StandardClaimNames.NAME, "user1");
idTokenClaims.put(StandardClaimNames.EMAIL, "user1@example.com");
idTokenClaims.put("roles", Stream.of("role1", "role2")
.collect(Collectors.toList()));
OidcIdToken idToken = new OidcIdToken("access-token", Instant.MIN, Instant.MAX, idTokenClaims);
GroupInformation groupInformation = new GroupInformation();
groupInformation.setGroupsIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
groupInformation.setGroupsNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toSet()));
GraphClient graphClient = mock(GraphClient.class);
when(graphClient.getGroupInformation(anyString())).thenReturn(groupInformation);
AadAuthenticationProperties properties = new AadAuthenticationProperties();
properties = new AadAuthenticationProperties();
properties.getUserGroup().setAllowedGroupNames(Stream.of("groupName1", "groupName2")
.collect(Collectors.toList()));
properties.getUserGroup().setAllowedGroupIds(Stream.of("groupId1", "groupId2")
.collect(Collectors.toSet()));
ClientRegistration clientRegistration = this.clientRegistrationBuilder
.build();
AadOAuth2UserService aadOAuth2UserService = new AadOAuth2UserService(properties, graphClient, null);
OidcUser user = aadOAuth2UserService
.getUser(new OidcUserRequest(clientRegistration, this.accessToken, idToken));
assertThat(user.getUserInfo()).isNull();
assertThat(user.getClaims()).isEqualTo(idTokenClaims);
assertThat(user.getAuthorities().size()).isEqualTo(6);
Set<SimpleGrantedAuthority> simpleGrantedAuthorities
= Stream.of(new SimpleGrantedAuthority("APPROLE_role1"),
new SimpleGrantedAuthority("APPROLE_role2"),
new SimpleGrantedAuthority("ROLE_groupId1"),
new SimpleGrantedAuthority("ROLE_groupId2"),
new SimpleGrantedAuthority("ROLE_groupName1"),
new SimpleGrantedAuthority("ROLE_groupName2"))
.collect(Collectors.toSet());
assertThat(user.getAuthorities()).isEqualTo(simpleGrantedAuthorities);
}
static final class TestOAuth2AccessTokens {
private TestOAuth2AccessTokens() {
}
static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
} |
Why not move this checking up to the top? | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | } | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} |
can you make it more clear? | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | } | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} |
I mean the checking for `authentication != null` can be done early, like the below code: ```java public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException { Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); if (authentication != null) { LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities()); return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER); } ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes(); HttpSession session = attr.getRequest().getSession(true); // ... return defaultOidcUser; } ``` | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | } | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} |
<img width="861" alt="image" src="https://user-images.githubusercontent.com/4465723/207798822-1320b052-d9dc-43dd-990c-a09e4bc3bb80.png"> We should get the session object first. | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | } | public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
Assert.notNull(userRequest, "userRequest cannot be null");
ServletRequestAttributes attr = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
HttpSession session = attr.getRequest().getSession(true);
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
if (authentication != null) {
LOGGER.debug("User {}'s authorities saved from session: {}.", authentication.getName(), authentication.getAuthorities());
return (DefaultOidcUser) session.getAttribute(DEFAULT_OIDC_USER);
}
DefaultOidcUser defaultOidcUser = getUser(userRequest);
session.setAttribute(DEFAULT_OIDC_USER, defaultOidcUser);
return defaultOidcUser;
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} | class AadOAuth2UserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
private static final Logger LOGGER = LoggerFactory.getLogger(AadOAuth2UserService.class);
private final List<String> allowedGroupNames;
private final Set<String> allowedGroupIds;
private final GraphClient graphClient;
private static final String DEFAULT_OIDC_USER = "defaultOidcUser";
private static final String ROLES = "roles";
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties, RestTemplateBuilder restTemplateBuilder) {
this(properties, new GraphClient(properties, restTemplateBuilder));
}
/**
* Creates a new instance of {@link AadOAuth2UserService}.
*
* @param properties the AAD authentication properties
* @param graphClient the graph client
* @param restTemplateBuilder the restTemplateBuilder
*/
public AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient,
RestTemplateBuilder restTemplateBuilder) {
this(properties, graphClient);
}
private AadOAuth2UserService(AadAuthenticationProperties properties,
GraphClient graphClient) {
allowedGroupNames = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupNames)
.orElseGet(Collections::emptyList);
allowedGroupIds = Optional.ofNullable(properties)
.map(AadAuthenticationProperties::getUserGroup)
.map(AadAuthenticationProperties.UserGroupProperties::getAllowedGroupIds)
.orElseGet(Collections::emptySet);
this.graphClient = graphClient;
}
/**
* Returns a {@link DefaultOidcUser} instance.
* <p/>
*
* The {@link DefaultOidcUser} instance is constructed with {@link GrantedAuthority}, {@link OidcIdToken} and nameAttributeKey.
* <a href="https:
* this implementation will not get userinfo from the UserInfo Endpoint. Calling {@link org.springframework.security.oauth2.core.oidc.user.OidcUser
*
* <p/>
*
* @param userRequest the user request
*
* @return a {@link DefaultOidcUser} instance.
*
* @throws OAuth2AuthenticationException if an error occurs.
*/
@Override
DefaultOidcUser getUser(OidcUserRequest userRequest) {
Set<SimpleGrantedAuthority> authorities = buildAuthorities(userRequest);
String nameAttributeKey = getNameAttributeKey(userRequest);
OidcIdToken idToken = userRequest.getIdToken();
DefaultOidcUser defaultOidcUser = new DefaultOidcUser(authorities, idToken, nameAttributeKey);
return defaultOidcUser;
}
private String getNameAttributeKey(OidcUserRequest userRequest) {
return Optional.of(userRequest)
.map(u -> u.getClientRegistration())
.map(u -> u.getProviderDetails())
.map(u -> u.getUserInfoEndpoint())
.map(u -> u.getUserNameAttributeName())
.filter(StringUtils::hasText)
.orElse(AadJwtClaimNames.NAME);
}
private Set<SimpleGrantedAuthority> buildAuthorities(OidcUserRequest userRequest) {
Set<String> authorityStrings = new HashSet<>();
authorityStrings.addAll(extractRolesFromIdToken(userRequest.getIdToken()));
authorityStrings.addAll(extractGroupRolesFromAccessToken(userRequest.getAccessToken()));
Set<SimpleGrantedAuthority> authorities = authorityStrings.stream()
.map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet());
if (authorities.isEmpty()) {
authorities = DEFAULT_AUTHORITY_SET;
}
return authorities;
}
/**
* Extract roles from idToken.
*
* @return roles the roles
*/
Set<String> extractRolesFromIdToken(OidcIdToken idToken) {
return Optional.ofNullable(idToken)
.map(token -> (Collection<?>) token.getClaim(ROLES))
.filter(obj -> obj instanceof List<?>)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(s -> StringUtils.hasText(s.toString()))
.map(role -> AuthorityPrefix.APP_ROLE + role)
.collect(Collectors.toSet());
}
/**
* Extract group roles from accessToken.
*
* @return roles the group roles
*/
Set<String> extractGroupRolesFromAccessToken(OAuth2AccessToken accessToken) {
if (allowedGroupNames.isEmpty() && allowedGroupIds.isEmpty()) {
return Collections.emptySet();
}
Set<String> roles = new HashSet<>();
GroupInformation groupInformation = getGroupInformation(accessToken);
if (!allowedGroupNames.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsNames)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(allowedGroupNames::contains)
.forEach(roles::add);
}
if (!allowedGroupIds.isEmpty()) {
Optional.of(groupInformation)
.map(GroupInformation::getGroupsIds)
.map(Collection::stream)
.orElseGet(Stream::empty)
.filter(this::isAllowedGroupId)
.forEach(roles::add);
}
return roles.stream()
.map(roleStr -> AuthorityPrefix.ROLE + roleStr)
.collect(Collectors.toSet());
}
private boolean isAllowedGroupId(String groupId) {
if (allowedGroupIds.size() == 1 && allowedGroupIds.contains("all")) {
return true;
}
return allowedGroupIds.contains(groupId);
}
private GroupInformation getGroupInformation(OAuth2AccessToken accessToken) {
return Optional.of(accessToken)
.map(AbstractOAuth2Token::getTokenValue)
.map(graphClient::getGroupInformation)
.orElseGet(GroupInformation::new);
}
} |
Should this be info? I assume this is not a success state, so why not `warn` or `error` (if it is actionable) ? | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.info(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | logger.info( | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.warn(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
private final Long maxMicroBatchIntervalInMs;
private final TContext batchContext;
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;
private final List<FluxSink<CosmosItemOperation>> groupSinks;
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor bound to the given container.
 *
 * @param container the container all operations will be executed against; must not be null.
 * @param inputOperations the stream of item operations to execute; must not be null.
 * @param cosmosBulkOptions bulk execution tuning options (micro-batch interval, thresholds,
 *        concurrency, listener); must not be null.
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {

    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");

    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();

    // Bulk options are only reachable through the internal accessor helpers.
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    // Thresholds state may be shared across executors so batch sizing survives across calls.
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);

    // Build the log prefix once; it is appended to every log statement of this instance.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier +"[n/a]";
    }

    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();

    // Single daemon thread that periodically emits flush markers into every group sink.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);

    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
/**
 * Disposes this executor exactly once. When no operations are in flight the
 * sinks are completed gracefully; otherwise the executor is shut down directly.
 */
@Override
public void dispose() {
    if (!this.isDisposed.compareAndSet(false, true)) {
        return; // a concurrent caller already disposed this instance
    }

    if (this.totalCount.get() == 0) {
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
/**
 * @return whether {@link #dispose()} has already been invoked on this executor.
 */
@Override
public boolean isDisposed() {
    boolean disposedSnapshot = this.isDisposed.get();
    return disposedSnapshot;
}
/**
 * Cancels the currently scheduled periodic flush task, if any.
 * Cancellation failures are logged and swallowed - they only mean a stale
 * flush may still fire once.
 */
private void cancelFlushTask() {
    final ScheduledFuture<?> flushTaskSnapshot = this.scheduledFutureForFlush;
    if (flushTaskSnapshot == null) {
        return;
    }

    try {
        flushTaskSnapshot.cancel(true);
        logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
    } catch (Exception e) {
        logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
    }
}
/**
 * Shuts the executor down exactly once: completes all group sinks, cancels the
 * periodic flush and stops the scheduler thread. Failures while stopping the
 * scheduler are logged and swallowed.
 */
private void shutdown() {
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);

        // Completing the group sinks lets the per-partition pipelines drain and terminate.
        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);
        this.cancelFlushTask();

        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
/**
 * Public entry point: runs the core pipeline and guarantees the executor is
 * disposed when the result flux terminates for any reason (complete, error,
 * or cancellation).
 *
 * @return one {@link CosmosBulkOperationResponse} per input operation.
 */
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
    return this
        .executeCore()
        .doFinally((SignalType signal) -> {
            // NOTE(review): the log format strings below appear truncated in this copy of
            // the file (they end mid-literal) - verify against the upstream source.
            if (signal == SignalType.ON_COMPLETE) {
                logger.debug("BulkExecutor.execute flux completed -
                    this.totalCount.get(),
                    this.operationContextText,
                    getThreadInfo());
            } else {
                int itemsLeftSnapshot = this.totalCount.get();
                if (itemsLeftSnapshot > 0) {
                    // Terminating with unfinished operations is unexpected - log at info.
                    logger.info("BulkExecutor.execute flux terminated - Signal: {} -
                        signal,
                        itemsLeftSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                } else {
                    logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
                        signal,
                        itemsLeftSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                }
            }
            // Always release resources once the consumer is done with the flux.
            this.dispose();
        });
}
/**
 * Builds the core reactive pipeline:
 * input operations (merged with split-retries from {@code mainSink}) are tagged with a
 * retry policy, resolved to a partition key range, grouped per range, executed as micro
 * batches, and finally book-kept so the sinks complete once all work is done.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
    // Optional caller-provided cap on concurrently processed partition groups.
    Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);

    // Concurrency floor of 256; without an explicit cap, derive it from the feed-range count.
    Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
        Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
        this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));

    return
        maxConcurrentCosmosPartitionsMono
            .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
            .flatMapMany(maxConcurrentCosmosPartitions -> {
                logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
                    maxConcurrentCosmosPartitions,
                    this.operationContextText);

                return this.inputOperations
                    .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                    // Skip (but log) individual operations the source fails to produce.
                    .onErrorContinue((throwable, o) ->
                        logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
                            o,
                            throwable.getMessage(),
                            this.operationContextText))
                    .doOnNext((CosmosItemOperation cosmosItemOperation) -> {
                        // Attach the retry policy and count every real operation
                        // (flush markers are excluded from the in-flight count).
                        BulkExecutorUtil.setRetryPolicyForBulk(
                            docClientWrapper,
                            this.container,
                            cosmosItemOperation,
                            this.throttlingRetryOptions);
                        if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
                            totalCount.incrementAndGet();
                        }

                        logger.trace(
                            "SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
                            getItemOperationDiagnostics(cosmosItemOperation),
                            totalCount.get(),
                            this.operationContextText,
                            getThreadInfo()
                        );
                    })
                    .doOnComplete(() -> {
                        // Input stream is drained; either finish immediately or switch to a
                        // (usually shorter) flush interval to drain the remaining buffers.
                        mainSourceCompleted.set(true);

                        long totalCountSnapshot = totalCount.get();
                        // NOTE(review): the format string below appears truncated in this
                        // copy of the file - verify against the upstream source.
                        logger.debug("Main source completed -
                            totalCountSnapshot,
                            this.operationContextText);
                        if (totalCountSnapshot == 0) {
                            // No pending operations left - terminate everything.
                            completeAllSinks();
                        } else {
                            this.cancelFlushTask();
                            this.onFlush();

                            long flushIntervalAfterDrainingIncomingFlux = Math.min(
                                this.maxMicroBatchIntervalInMs,
                                BatchRequestResponseConstants
                                    .DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);

                            this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
                                this::onFlush,
                                flushIntervalAfterDrainingIncomingFlux,
                                flushIntervalAfterDrainingIncomingFlux,
                                TimeUnit.MILLISECONDS);
                            logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
                        }
                    })
                    // Operations re-emitted after a partition split/gone re-enter here.
                    .mergeWith(mainSink.asFlux())
                    .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                    .flatMap(
                        operation -> {
                            logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
                                getItemOperationDiagnostics(operation),
                                this.operationContextText,
                                getThreadInfo());

                            // Resolve the target partition key range and pair the operation
                            // with that range's (possibly shared) batching thresholds.
                            return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
                                .map((String pkRangeId) -> {
                                    PartitionScopeThresholds partitionScopeThresholds =
                                        this.partitionScopeThresholds.computeIfAbsent(
                                            pkRangeId,
                                            (newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));

                                    logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
                                        getItemOperationDiagnostics(operation),
                                        pkRangeId,
                                        this.operationContextText,
                                        getThreadInfo());

                                    return Pair.of(partitionScopeThresholds, operation);
                                });
                        })
                    // One group per partition key range; groups are processed concurrently
                    // up to maxConcurrentCosmosPartitions.
                    .groupBy(Pair::getKey, Pair::getValue)
                    .flatMap(
                        this::executePartitionedGroup,
                        maxConcurrentCosmosPartitions)
                    .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                    .doOnNext(requestAndResponse -> {
                        // One response observed -> one operation finished. When the counter
                        // reaches zero AND the source is drained, everything can complete.
                        int totalCountAfterDecrement = totalCount.decrementAndGet();
                        boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                        if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
                            logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
                                getItemOperationDiagnostics(requestAndResponse.getOperation()),
                                totalCountAfterDecrement,
                                this.operationContextText,
                                getThreadInfo());
                            completeAllSinks();
                        } else {
                            if (totalCountAfterDecrement == 0) {
                                logger.debug(
                                    "No Work left - but mainSource not yet completed, Context: {} {}",
                                    this.operationContextText,
                                    getThreadInfo());
                            }
                            logger.trace(
                                "Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
                                totalCountAfterDecrement,
                                mainSourceCompletedSnapshot,
                                getItemOperationDiagnostics(requestAndResponse.getOperation()),
                                this.operationContextText,
                                getThreadInfo());
                        }
                    })
                    .doOnComplete(() -> {
                        // Defensive completion path in case the stream completes without the
                        // doOnNext above having fired the terminal completeAllSinks call.
                        int totalCountSnapshot = totalCount.get();
                        boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                        if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
                            logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
                            completeAllSinks();
                        } else {
                            logger.debug(
                                "DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
                                totalCountSnapshot,
                                mainSourceCompletedSnapshot,
                                this.operationContextText,
                                getThreadInfo());
                        }
                    });
            });
}
/**
 * Processes all operations targeting a single partition key range: buffers them into
 * micro batches (closed by size, payload bytes, age, or an explicit flush marker) and
 * executes each batch. Retried operations re-enter through the group's own sink.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {

    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();

    // Serialized processor + sink so retries can be pushed back into this group
    // from arbitrary threads.
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);

    // Mutable batch-boundary state; reset whenever a batch is closed.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);

    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());

            // A flush marker closes the current batch only when it is non-empty;
            // an empty batch just swallows the marker.
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());

                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);
                    return true;
                }

                return false;
            }

            // Track age from the first record of the current batch.
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);

            // Close the batch when it is full, too large in bytes, or too old.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {

                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }

            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                // Strip flush markers; only real operations go into the batch request.
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {

                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }

                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());

                return executeOperations(operations, thresholds, groupSink);
            },
            // Number of micro batches of this group executed concurrently.
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds {@code item}'s serialized payload length to the running batch total and
 * returns the new total. Operations that are not {@link CosmosItemOperationBase}
 * expose no length and leave the accumulator untouched.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        return currentTotalSerializedLength.get();
    }

    int serializedLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.accumulateAndGet(serializedLength, Integer::sum);
}
/**
 * Turns one micro batch of operations into a server batch request and executes it.
 * Operations that did not fit into the request ("pending") are re-queued on the
 * group sink so they land in a later batch.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {

    if (operations.isEmpty()) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }

    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);

    // Overflow operations go back into the group for a subsequent micro batch.
    if (serverOperationBatchRequest.getBatchPendingOperations().size() > 0) {
        for (CosmosItemOperation pendingOperation : serverOperationBatchRequest.getBatchPendingOperations()) {
            groupSink.next(pendingOperation);
        }
    }

    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
/**
 * Executes a single server batch request and fans the response out into one
 * result per contained operation. On a request-level exception every operation
 * of the request is routed through the per-operation exception handler instead.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {
            // Only Exceptions are translated into per-operation results;
            // Errors are rethrown untouched.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }
            Exception exception = (Exception) throwable;
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
/**
 * Converts one per-operation batch result into a bulk response. Failed operations
 * are run through the operation's retry policy: retryable ones are re-enqueued
 * (emitting nothing), non-retryable ones are surfaced as failed responses.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);

    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (!operationResult.isSuccessStatusCode()) {

        if (itemOperation instanceof ItemBulkOperation<?, ?>) {

            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {

                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        // Re-enqueue; the retried attempt will produce the eventual response.
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // 409/412 are expected application-level outcomes - log at debug;
                        // anything else is unexpected and logged at error.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {

                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }

                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });

        } else {
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }

    // Success feeds the adaptive batch-size heuristics for this partition.
    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
/**
 * Returns the operation-level context when the operation is an
 * {@link ItemBulkOperation} carrying one; otherwise falls back to the
 * legacy batch-scoped context.
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (itemOperation instanceof ItemBulkOperation<?, ?>) {
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        TContext operationLevelContext = itemBulkOperation.getContext();
        if (operationLevelContext != null) {
            return operationLevelContext;
        }
    }

    return this.batchContext;
}
/**
 * Handles a request-level exception for a single operation. Partition
 * gone/split errors are re-emitted through the main sink so the partition key
 * range is re-resolved; other Cosmos exceptions go through the ordinary retry
 * policy; everything else becomes a failed response immediately.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;

        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    // Route through the main sink so the operation is re-grouped
                    // against the post-split partition key ranges.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }

    // Non-Cosmos exceptions (or unknown operation types) are not retried.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-enqueues {@code itemOperation} on the group sink for another attempt,
 * immediately or after {@code backOffTime}, and records the retry against the
 * partition thresholds. Emits nothing - the retried attempt produces the
 * eventual response.
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {

    // Count the retry regardless of whether it is delayed.
    thresholds.recordEnqueuedRetry();

    boolean retryImmediately = backOffTime == null || backOffTime.isZero();
    if (retryImmediately) {
        groupSink.next(itemOperation);
        return Mono.empty();
    }

    return Mono
        .delay(backOffTime)
        .flatMap((delayTick) -> {
            groupSink.next(itemOperation);
            return Mono.empty();
        });
}
/**
 * Applies the ordinary (non-split) retry policy to a CosmosException:
 * retryable failures are re-enqueued on the group sink; otherwise the
 * exception becomes this operation's final failed response.
 */
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {

    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
        if (result.shouldRetry) {
            return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
        } else {
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
    });
}
/**
 * Builds the request options (throughput control group, custom headers, listener,
 * content-response-on-write) and executes the server batch request with tracing.
 */
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());

    // Propagate any caller-supplied custom headers.
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for(Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);

    // Even when content-response-on-write is globally disabled, a batch containing a READ
    // (or an operation that explicitly opts in) still needs response content.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        serverRequest.getOperations().size() > 0) {

        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {

                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {

                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }

    // Wrap the request in a tracing span named after this container.
    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);

        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
/**
 * Periodic flush callback: pushes the singleton flush marker into every group
 * sink so partially filled micro batches get dispatched. Any failure is logged
 * and swallowed - a missed flush is recovered by the next scheduled run.
 */
private void onFlush() {
    try {
        for (FluxSink<CosmosItemOperation> groupSink : this.groupSinks) {
            groupSink.next(FlushBuffersItemOperation.singleton());
        }
    } catch(Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
/**
 * Renders a compact diagnostic string (type, partition key, id) for an item
 * operation; the flush marker gets a fixed representation.
 */
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }

    String partitionKeyText = operation.getPartitionKeyValue() != null
        ? operation.getPartitionKeyValue().toString()
        : "n/a";

    return "ItemOperation[Type: "
        + operation.getOperationType().toString()
        + ", PK: "
        + partitionKeyText
        + ", id: "
        + operation.getId()
        + "]";
}
/**
 * Renders the calling thread's name, group, daemon flag and id for log output.
 */
private static String getThreadInfo() {
    Thread currentThread = Thread.currentThread();
    ThreadGroup group = currentThread.getThreadGroup();

    return "Thread[Name: "
        + currentThread.getName()
        + ",Group: "
        + (group != null ? group.getName() : "n/a")
        + ", isDaemon: "
        + currentThread.isDaemon()
        + ", Id: "
        + currentThread.getId()
        + "]";
}
/**
 * Emit-failure handler for {@code mainSink}: retries the emission only for
 * {@code FAIL_NON_SERIALIZED} (concurrent emitters racing on the serialized
 * sink); every other failure is logged at error and not retried.
 */
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {

    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        boolean isNonSerializedFailure = emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED);
        if (!isNonSerializedFailure) {
            logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
            return false;
        }

        logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        return true;
    }
}
} | class BulkExecutor<TContext> implements Disposable {
// --- Static state shared by all BulkExecutor instances ---
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
// Monotonic counter used only to build a unique per-instance identifier.
private final static AtomicLong instanceCount = new AtomicLong(0);

// --- Collaborators captured at construction time ---
private final CosmosAsyncContainer container;                  // target container for all batch requests
private final AsyncDocumentClient docClientWrapper;            // low-level client that executes the batch requests
private final String operationContextText;                     // "identifier[callerContext]" - included in every log line
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations; // caller-supplied operation stream
private final Long maxMicroBatchIntervalInMs;                  // flush interval for partially filled micro batches
private final TContext batchContext;                           // fallback context used when an operation carries none
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds; // per-PKRange batching thresholds
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;

// --- Mutable pipeline state ---
private final AtomicBoolean mainSourceCompleted;               // set once the input flux signals onComplete
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;                        // operations enqueued but not yet answered
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;        // re-entry point for operations retried after a split/gone
private final List<FluxSink<CosmosItemOperation>> groupSinks;  // one sink per partition-scoped group (retries + flush markers)
private final ScheduledThreadPoolExecutor executorService;     // single daemon thread driving the periodic flush
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;                             // tracing span name: "nonTransactionalBatch.<containerId>"
private ScheduledFuture<?> scheduledFutureForFlush;            // currently scheduled periodic flush task
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor bound to the given container.
 *
 * @param container the container all operations will be executed against; must not be null.
 * @param inputOperations the stream of item operations to execute; must not be null.
 * @param cosmosBulkOptions bulk execution tuning options (micro-batch interval, thresholds,
 *        concurrency, listener); must not be null.
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {

    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");

    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();

    // Bulk options are only reachable through the internal accessor helpers.
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    // Thresholds state may be shared across executors so batch sizing survives across calls.
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);

    // Build the log prefix once; it is appended to every log statement of this instance.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier +"[n/a]";
    }

    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();

    // Single daemon thread that periodically emits flush markers into every group sink.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);

    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
/**
 * Disposes this executor exactly once. When no operations are in flight the
 * sinks are completed gracefully; otherwise the executor is shut down directly.
 */
@Override
public void dispose() {
    if (!this.isDisposed.compareAndSet(false, true)) {
        return; // a concurrent caller already disposed this instance
    }

    if (this.totalCount.get() == 0) {
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
/**
 * @return whether {@link #dispose()} has already been invoked on this executor.
 */
@Override
public boolean isDisposed() {
    boolean disposedSnapshot = this.isDisposed.get();
    return disposedSnapshot;
}
/**
 * Cancels the currently scheduled periodic flush task, if any.
 * Cancellation failures are logged and swallowed - they only mean a stale
 * flush may still fire once.
 */
private void cancelFlushTask() {
    final ScheduledFuture<?> flushTaskSnapshot = this.scheduledFutureForFlush;
    if (flushTaskSnapshot == null) {
        return;
    }

    try {
        flushTaskSnapshot.cancel(true);
        logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
    } catch (Exception e) {
        logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
    }
}
/**
 * Shuts the executor down exactly once: completes all group sinks, cancels the
 * periodic flush and stops the scheduler thread. Failures while stopping the
 * scheduler are logged and swallowed.
 */
private void shutdown() {
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);

        // Completing the group sinks lets the per-partition pipelines drain and terminate.
        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);
        this.cancelFlushTask();

        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
/**
 * Public entry point: runs the core pipeline and guarantees the executor is
 * disposed when the result flux terminates for any reason (complete, error,
 * or cancellation).
 *
 * @return one {@link CosmosBulkOperationResponse} per input operation.
 */
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
    return this
        .executeCore()
        .doFinally((SignalType signal) -> {
            // NOTE(review): the log format strings below appear truncated in this copy of
            // the file (they end mid-literal) - verify against the upstream source.
            if (signal == SignalType.ON_COMPLETE) {
                logger.debug("BulkExecutor.execute flux completed -
                    this.totalCount.get(),
                    this.operationContextText,
                    getThreadInfo());
            } else {
                int itemsLeftSnapshot = this.totalCount.get();
                if (itemsLeftSnapshot > 0) {
                    // Terminating with unfinished operations is unexpected - log at info.
                    logger.info("BulkExecutor.execute flux terminated - Signal: {} -
                        signal,
                        itemsLeftSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                } else {
                    logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
                        signal,
                        itemsLeftSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                }
            }
            // Always release resources once the consumer is done with the flux.
            this.dispose();
        });
}
/**
 * Core pipeline: counts incoming operations, resolves each to its physical
 * partition (PkRangeId), groups and executes per-partition micro batches, and
 * completes the main sink once every tracked operation has produced a response.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
    Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
    // Concurrency floor of 256; when not configured it is derived from the feed range count.
    Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
        Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
        this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
    return
        maxConcurrentCosmosPartitionsMono
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(maxConcurrentCosmosPartitions -> {
            logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
                maxConcurrentCosmosPartitions,
                this.operationContextText);
            return this.inputOperations
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                // A failing input operation is skipped rather than failing the whole bulk run.
                .onErrorContinue((throwable, o) ->
                    logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
                        o,
                        throwable.getMessage(),
                        this.operationContextText))
                .doOnNext((CosmosItemOperation cosmosItemOperation) -> {
                    BulkExecutorUtil.setRetryPolicyForBulk(
                        docClientWrapper,
                        this.container,
                        cosmosItemOperation,
                        this.throttlingRetryOptions);
                    // Flush markers are control messages; they are excluded from the outstanding-work count.
                    if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
                        totalCount.incrementAndGet();
                    }
                    logger.trace(
                        "SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
                        getItemOperationDiagnostics(cosmosItemOperation),
                        totalCount.get(),
                        this.operationContextText,
                        getThreadInfo()
                    );
                })
                .doOnComplete(() -> {
                    mainSourceCompleted.set(true);
                    long totalCountSnapshot = totalCount.get();
                    // NOTE(review): this log literal appears truncated in this copy of the file.
                    logger.debug("Main source completed -
                    totalCountSnapshot,
                    this.operationContextText);
                    if (totalCountSnapshot == 0) {
                        // Input drained with nothing pending: the whole run is done.
                        completeAllSinks();
                    } else {
                        // Re-arm flushing at a (possibly shorter) cadence to drain remaining buffered work.
                        this.cancelFlushTask();
                        this.onFlush();
                        long flushIntervalAfterDrainingIncomingFlux = Math.min(
                            this.maxMicroBatchIntervalInMs,
                            BatchRequestResponseConstants
                                .DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
                        this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
                            this::onFlush,
                            flushIntervalAfterDrainingIncomingFlux,
                            flushIntervalAfterDrainingIncomingFlux,
                            TimeUnit.MILLISECONDS);
                        logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
                    }
                })
                // Retried operations re-enter the pipeline through the main sink.
                .mergeWith(mainSink.asFlux())
                .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap(
                    operation -> {
                        logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
                            getItemOperationDiagnostics(operation),
                            this.operationContextText,
                            getThreadInfo());
                        // Pair each operation with its partition's adaptive thresholds (created on first use).
                        return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
                            .map((String pkRangeId) -> {
                                PartitionScopeThresholds partitionScopeThresholds =
                                    this.partitionScopeThresholds.computeIfAbsent(
                                        pkRangeId,
                                        (newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
                                logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
                                    getItemOperationDiagnostics(operation),
                                    pkRangeId,
                                    this.operationContextText,
                                    getThreadInfo());
                                return Pair.of(partitionScopeThresholds, operation);
                            });
                    })
                .groupBy(Pair::getKey, Pair::getValue)
                .flatMap(
                    this::executePartitionedGroup,
                    maxConcurrentCosmosPartitions)
                .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .doOnNext(requestAndResponse -> {
                    // Each emitted response retires one tracked operation.
                    int totalCountAfterDecrement = totalCount.decrementAndGet();
                    boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                    if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
                        logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
                            getItemOperationDiagnostics(requestAndResponse.getOperation()),
                            totalCountAfterDecrement,
                            this.operationContextText,
                            getThreadInfo());
                        completeAllSinks();
                    } else {
                        if (totalCountAfterDecrement == 0) {
                            logger.debug(
                                "No Work left - but mainSource not yet completed, Context: {} {}",
                                this.operationContextText,
                                getThreadInfo());
                        }
                        logger.trace(
                            "Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
                            totalCountAfterDecrement,
                            mainSourceCompletedSnapshot,
                            getItemOperationDiagnostics(requestAndResponse.getOperation()),
                            this.operationContextText,
                            getThreadInfo());
                    }
                })
                .doOnComplete(() -> {
                    int totalCountSnapshot = totalCount.get();
                    boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                    if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
                        logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
                        completeAllSinks();
                    } else {
                        logger.debug(
                            "DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
                            totalCountSnapshot,
                            mainSourceCompletedSnapshot,
                            this.operationContextText,
                            getThreadInfo());
                    }
                });
        });
}
/**
 * Handles one physical partition's stream of operations: buffers them into
 * micro batches (closed by count, payload size, age, or an explicit flush
 * marker) and dispatches each batch. A dedicated group sink feeds retries and
 * flush markers back into this partition's stream.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {
    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    // Registered so onFlush()/shutdown() can reach this partition's sink.
    groupSinks.add(groupSink);
    // -1 means "no open batch"; set to the first buffered operation's timestamp when a batch starts.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);
    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());
            // A flush marker closes the current batch only if it is non-empty.
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                    // Reset batch-accumulation state before closing the buffer.
                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);
                    return true;
                }
                return false;
            }
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);
            // Close the batch when any of the three limits (count, age, payload bytes) is reached.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }
            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                // Strip flush markers; only real operations go into the server batch.
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {
                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }
                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());
                return executeOperations(operations, thresholds, groupSink);
            },
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds the operation's serialized payload size to the running total when the
 * size is known; otherwise just reports the current running total.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        // Size unknown for this operation kind - leave the accumulator untouched.
        return currentTotalSerializedLength.get();
    }
    int serializedLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.accumulateAndGet(serializedLength, Integer::sum);
}
/**
 * Builds a server batch request for one micro batch and executes it.
 * Operations that did not fit into the request are pushed back onto the group
 * sink so they are picked up by a subsequent micro batch.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {

    // Idiom fix: isEmpty() instead of size() == 0 / size() > 0 comparisons.
    if (operations.isEmpty()) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }

    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);
    if (!serverOperationBatchRequest.getBatchPendingOperations().isEmpty()) {
        // Overflow operations are re-queued for the next micro batch of this partition.
        serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
    }

    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
/**
 * Executes one server batch request and fans the per-operation results back
 * out. On a request-level exception, every operation in the batch is routed
 * through the exception handler, which may re-enqueue it for retry.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                // Each operation in the batch gets its own result handling (success or per-op retry).
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {
            // Errors (e.g. OutOfMemoryError) are rethrown; only Exceptions are handled per operation.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }
            Exception exception = (Exception) throwable;
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
/**
 * Converts a single operation result from a batch response into a bulk
 * response, consulting the operation's retry policy on failure. Retried
 * operations are re-enqueued on the group sink (optionally after a back-off);
 * 409/412 outcomes are expected application-level results and logged at debug,
 * other failures at error.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);

    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (!operationResult.isSuccessStatusCode()) {
        if (itemOperation instanceof ItemBulkOperation<?, ?>) {
            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {
                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // Conflict / precondition-failed are normal business outcomes - debug level only.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }
                        // Exhausted retries: surface the failed response to the caller.
                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });
        } else {
            // Only ItemBulkOperation carries a retry policy; anything else is a programming error.
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }

    // Success: feed the outcome into the adaptive micro-batch size thresholds.
    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
/**
 * Returns the operation-scoped context when the operation carries one,
 * otherwise falls back to the batch-scoped context.
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (itemOperation instanceof ItemBulkOperation<?, ?>) {
        TContext operationContext = ((ItemBulkOperation<?, ?>) itemOperation).getContext();
        if (operationContext != null) {
            return operationContext;
        }
    }
    // No per-operation context available - use the legacy batch-scoped one.
    return this.batchContext;
}
/**
 * Handles a request-level exception for one operation of a failed batch.
 * A "gone" (split/partition-moved) error re-routes the operation through the
 * main sink so its partition is re-resolved; other Cosmos errors go through
 * the regular retry policy; anything else is surfaced as a failed response.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    // Partition moved: back into the main pipeline so the PkRangeId is re-resolved.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }

    // Non-Cosmos exception (or unknown operation type): report the failure directly.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-enqueues an operation on its partition's group sink, immediately or after
 * the requested back-off. Always completes empty - the eventual response is
 * produced by the retried execution, not here.
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {

    thresholds.recordEnqueuedRetry();
    if (backOffTime != null && !backOffTime.isZero()) {
        // Delay first, then hand the operation back to the partition's stream.
        return Mono
            .delay(backOffTime)
            .flatMap((ignored) -> {
                groupSink.next(itemOperation);
                return Mono.empty();
            });
    }
    // No back-off requested: retry right away.
    groupSink.next(itemOperation);
    return Mono.empty();
}
/**
 * Applies the operation's retry policy to a non-split CosmosException:
 * either re-enqueues the operation (possibly with back-off) or surfaces the
 * exception as this operation's final bulk response.
 */
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {

    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
        if (!result.shouldRetry) {
            // Retries exhausted or non-retriable: report the failure to the caller.
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
        return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
    });
}
/**
 * Sends one server batch request through the document client, propagating the
 * throughput-control group, custom headers and the operation listener, and
 * wraps the call with the client-side tracer.
 *
 * When the client default disables content-response-on-write, it is force
 * enabled only if some operation in the batch needs the body: a READ, or an
 * operation whose request options explicitly opted in.
 */
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());

    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for (Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);

    // Redundant size() > 0 guard removed - iterating an empty operations list is a no-op anyway.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled()) {
        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {
                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }

    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);
        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
/**
 * Broadcasts a flush marker to every partition group so partially filled
 * micro batches get dispatched; failures are logged, never propagated.
 */
private void onFlush() {
    try {
        for (FluxSink<CosmosItemOperation> sink : this.groupSinks) {
            sink.next(FlushBuffersItemOperation.singleton());
        }
    } catch (Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
/**
 * Renders a short diagnostic string (type, partition key, id) for an item
 * operation; the flush marker gets a fixed representation.
 */
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }
    String partitionKeyText =
        operation.getPartitionKeyValue() != null ? operation.getPartitionKeyValue().toString() : "n/a";
    return "ItemOperation[Type: "
        + operation.getOperationType().toString()
        + ", PK: "
        + partitionKeyText
        + ", id: "
        + operation.getId()
        + "]";
}
/**
 * Renders the current thread's name, group, daemon flag and id for log output.
 */
private static String getThreadInfo() {
    Thread current = Thread.currentThread();
    String groupName = current.getThreadGroup() != null ? current.getThreadGroup().getName() : "n/a";
    // Note: the original format has no space before "Group:" - preserved on purpose.
    return "Thread[Name: "
        + current.getName()
        + ",Group: "
        + groupName
        + ", isDaemon: "
        + current.isDaemon()
        + ", Id: "
        + current.getId()
        + "]";
}
/**
 * Emit-failure handler for the main sink: retries emissions that failed only
 * because of concurrent (non-serialized) access, and gives up on anything else.
 */
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {

    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        // FAIL_NON_SERIALIZED means concurrent emitters collided - safe to retry the emission.
        if (emitResult == Sinks.EmitResult.FAIL_NON_SERIALIZED) {
            logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
            return true;
        }

        logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        return false;
    }
}
} |
Not part of this PR, but just curious to see that logger on line 862 - is info. Should we also change it to debug? | private void completeAllSinks() {
// Intentionally info-level: sink closure marks the end of the bulk run and should be visible.
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
    logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
    if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
        completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
        // Benign: the sink was already terminated or cancelled elsewhere.
        logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
            completeEmitResult,
            this.operationContextText);
    } else {
        // Raised from info to warn: an unexpected completion failure deserves attention,
        // consistent with the warn level already used for this case elsewhere in this file.
        logger.warn(
            "Main sink completion failed. EmitResult: {}, Context: {}",
            completeEmitResult,
            this.operationContextText);
    }
}
this.shutdown();
} | if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED || | private void completeAllSinks() {
// Intentionally info-level: sink closure marks the end of the bulk run and should be visible.
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
    logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
    if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
        completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
        // Benign: the sink was already terminated or cancelled elsewhere.
        logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
            completeEmitResult,
            this.operationContextText);
    } else {
        // Unexpected completion failure - worth surfacing at warn level.
        logger.warn(
            "Main sink completion failed. EmitResult: {}, Context: {}",
            completeEmitResult,
            this.operationContextText);
    }
}
this.shutdown();
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
// Monotonic counter used to build a unique per-instance identifier for log correlation.
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
// Per-instance log prefix: identifier plus the caller-supplied operation context (or "[n/a]").
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
// The caller-provided stream of operations to execute in bulk.
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
// Max age (ms) a micro batch may accumulate before being flushed.
private final Long maxMicroBatchIntervalInMs;
// Legacy batch-scoped context used when an operation carries no context of its own.
private final TContext batchContext;
// Adaptive batching thresholds keyed by physical partition (PkRangeId).
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;

// Set once the input flux has been fully drained.
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
// Number of operations accepted but not yet answered; 0 + completed source => run finished.
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
// Re-entry point for operations that must be re-routed (e.g. after a partition split).
private final Sinks.Many<CosmosItemOperation> mainSink;
// One sink per physical partition group, used for retries and flush markers.
private final List<FluxSink<CosmosItemOperation>> groupSinks;
// Single-threaded scheduler driving the periodic flush.
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor for the given container and input operation stream.
 * Wires up client accessors, adaptive thresholds, the retry sinks, and starts
 * the periodic flush task on a dedicated daemon scheduler thread.
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {

    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");

    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();

    // Batching knobs and the (possibly shared) per-partition thresholds come from the options bag.
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier +"[n/a]";
    }

    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();

    // Single daemon thread drives the periodic flush; cancelled tasks are removed eagerly.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);

    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
@Override
public void dispose() {
    // Only the first caller performs teardown; later calls are no-ops.
    if (!this.isDisposed.compareAndSet(false, true)) {
        return;
    }
    if (totalCount.get() == 0) {
        // Nothing in flight: completing the sinks also triggers shutdown.
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
@Override
public boolean isDisposed() {
    // Reports whether dispose() has already been invoked.
    return isDisposed.get();
}
/**
 * Cancels the currently scheduled periodic flush task, if any.
 * The task is interrupted if running; a failure to cancel is logged but never propagated.
 */
private void cancelFlushTask() {
    // Read the field once so a concurrent re-assignment cannot slip in between the null check and the cancel.
    ScheduledFuture<?> scheduledFutureSnapshot = this.scheduledFutureForFlush;
    if (scheduledFutureSnapshot != null) {
        try {
            scheduledFutureSnapshot.cancel(true);
            logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
        } catch (Exception e) {
            // Fixed log message: missing space before the placeholder ("tasks{}" -> "tasks {}").
            logger.warn("Failed to cancel scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText, e);
        }
    }
}
/**
 * Idempotent teardown: completes every per-partition group sink, cancels the
 * periodic flush task, and stops the scheduler thread.
 */
private void shutdown() {
    // compareAndSet guarantees the teardown sequence runs at most once.
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);
        // Complete the group sinks first so no further micro batches are emitted.
        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);
        this.cancelFlushTask();
        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            // Best effort: failing to stop the scheduler must not fail disposal.
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
/**
 * Public entry point: runs the bulk pipeline and guarantees this executor is
 * disposed once the resulting flux terminates, for any termination reason.
 */
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
    return this
        .executeCore()
        .doFinally((SignalType signal) -> {
            if (signal == SignalType.ON_COMPLETE) {
                // NOTE(review): the log message literal appears truncated in this copy of the file - verify against upstream.
                logger.debug("BulkExecutor.execute flux completed -
                this.totalCount.get(),
                this.operationContextText,
                getThreadInfo());
            } else {
                int itemsLeftSnapshot = this.totalCount.get();
                if (itemsLeftSnapshot > 0) {
                    // Termination with unfinished operations is noteworthy - logged at info.
                    logger.info("BulkExecutor.execute flux terminated - Signal: {} -
                    signal,
                    itemsLeftSnapshot,
                    this.operationContextText,
                    getThreadInfo());
                } else {
                    logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
                    signal,
                    itemsLeftSnapshot,
                    this.operationContextText,
                    getThreadInfo());
                }
            }
            // Always release resources, regardless of how the flux terminated.
            this.dispose();
        });
}
/**
 * Core pipeline: counts incoming operations, resolves each to its physical
 * partition (PkRangeId), groups and executes per-partition micro batches, and
 * completes the main sink once every tracked operation has produced a response.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
    Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
    // Concurrency floor of 256; when not configured it is derived from the feed range count.
    Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
        Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
        this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
    return
        maxConcurrentCosmosPartitionsMono
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(maxConcurrentCosmosPartitions -> {
            logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
                maxConcurrentCosmosPartitions,
                this.operationContextText);
            return this.inputOperations
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                // A failing input operation is skipped rather than failing the whole bulk run.
                .onErrorContinue((throwable, o) ->
                    logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
                        o,
                        throwable.getMessage(),
                        this.operationContextText))
                .doOnNext((CosmosItemOperation cosmosItemOperation) -> {
                    BulkExecutorUtil.setRetryPolicyForBulk(
                        docClientWrapper,
                        this.container,
                        cosmosItemOperation,
                        this.throttlingRetryOptions);
                    // Flush markers are control messages; they are excluded from the outstanding-work count.
                    if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
                        totalCount.incrementAndGet();
                    }
                    logger.trace(
                        "SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
                        getItemOperationDiagnostics(cosmosItemOperation),
                        totalCount.get(),
                        this.operationContextText,
                        getThreadInfo()
                    );
                })
                .doOnComplete(() -> {
                    mainSourceCompleted.set(true);
                    long totalCountSnapshot = totalCount.get();
                    // NOTE(review): this log literal appears truncated in this copy of the file.
                    logger.debug("Main source completed -
                    totalCountSnapshot,
                    this.operationContextText);
                    if (totalCountSnapshot == 0) {
                        // Input drained with nothing pending: the whole run is done.
                        completeAllSinks();
                    } else {
                        // Re-arm flushing at a (possibly shorter) cadence to drain remaining buffered work.
                        this.cancelFlushTask();
                        this.onFlush();
                        long flushIntervalAfterDrainingIncomingFlux = Math.min(
                            this.maxMicroBatchIntervalInMs,
                            BatchRequestResponseConstants
                                .DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
                        this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
                            this::onFlush,
                            flushIntervalAfterDrainingIncomingFlux,
                            flushIntervalAfterDrainingIncomingFlux,
                            TimeUnit.MILLISECONDS);
                        logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
                    }
                })
                // Retried operations re-enter the pipeline through the main sink.
                .mergeWith(mainSink.asFlux())
                .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap(
                    operation -> {
                        logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
                            getItemOperationDiagnostics(operation),
                            this.operationContextText,
                            getThreadInfo());
                        // Pair each operation with its partition's adaptive thresholds (created on first use).
                        return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
                            .map((String pkRangeId) -> {
                                PartitionScopeThresholds partitionScopeThresholds =
                                    this.partitionScopeThresholds.computeIfAbsent(
                                        pkRangeId,
                                        (newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
                                logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
                                    getItemOperationDiagnostics(operation),
                                    pkRangeId,
                                    this.operationContextText,
                                    getThreadInfo());
                                return Pair.of(partitionScopeThresholds, operation);
                            });
                    })
                .groupBy(Pair::getKey, Pair::getValue)
                .flatMap(
                    this::executePartitionedGroup,
                    maxConcurrentCosmosPartitions)
                .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .doOnNext(requestAndResponse -> {
                    // Each emitted response retires one tracked operation.
                    int totalCountAfterDecrement = totalCount.decrementAndGet();
                    boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                    if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
                        logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
                            getItemOperationDiagnostics(requestAndResponse.getOperation()),
                            totalCountAfterDecrement,
                            this.operationContextText,
                            getThreadInfo());
                        completeAllSinks();
                    } else {
                        if (totalCountAfterDecrement == 0) {
                            logger.debug(
                                "No Work left - but mainSource not yet completed, Context: {} {}",
                                this.operationContextText,
                                getThreadInfo());
                        }
                        logger.trace(
                            "Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
                            totalCountAfterDecrement,
                            mainSourceCompletedSnapshot,
                            getItemOperationDiagnostics(requestAndResponse.getOperation()),
                            this.operationContextText,
                            getThreadInfo());
                    }
                })
                .doOnComplete(() -> {
                    int totalCountSnapshot = totalCount.get();
                    boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
                    if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
                        logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
                        completeAllSinks();
                    } else {
                        logger.debug(
                            "DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
                            totalCountSnapshot,
                            mainSourceCompletedSnapshot,
                            this.operationContextText,
                            getThreadInfo());
                    }
                });
        });
}
/**
 * Processes all operations targeting one physical partition (the group key is that partition's
 * {@link PartitionScopeThresholds}): operations are buffered into micro batches and flushed to the
 * server when the batch is large enough, old enough, its serialized payload approaches the request
 * size limit, or a flush marker arrives.
 *
 * A dedicated serialized group sink is created so that retries and flush markers can be re-queued
 * into this partition's pipeline; the sink is registered in {@code groupSinks} so onFlush/shutdown
 * can reach it.
 *
 * @param partitionedGroupFluxOfInputOperations the grouped flux of operations for one partition
 * @return a flux of per-operation bulk responses for this partition
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {
    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();
    // Serialized per-group processor/sink used to re-enqueue retries and flush markers.
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);
    // Micro-batch accounting: timestamp of the oldest buffered record (-1 == buffer empty),
    // number of buffered operations, and their accumulated serialized payload size.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);
    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());
            // A flush marker cuts the buffer only when it actually holds operations.
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);
                    return true;
                }
                return false;
            }
            // Record when the first operation of the current micro batch was enqueued.
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);
            // Cut the batch on any of the three limits: size, age, or payload bytes.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }
            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                // Strip flush markers; they only exist to trigger the buffer cut above.
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {
                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }
                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());
                return executeOperations(operations, thresholds, groupSink);
            },
            // Concurrency of in-flight micro batches per partition comes from the bulk options.
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds the serialized length of {@code item} to the running total and returns the new total.
 * Operations that are not {@link CosmosItemOperationBase} expose no serialized length and leave
 * the accumulator unchanged.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        return currentTotalSerializedLength.get();
    }
    int incrementalLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.addAndGet(incrementalLength);
}
/**
 * Builds a server batch request for the given micro batch and executes it. Operations that do not
 * fit into the request (size limits) are re-queued on the group sink for a later batch.
 *
 * Improvement: replaced {@code size() == 0} / {@code size() > 0} with the idiomatic
 * {@code isEmpty()} checks.
 *
 * @param operations the micro batch to execute (may be empty after flush-marker stripping)
 * @param thresholds per-partition thresholds; supplies the partition key range id
 * @param groupSink  sink used to re-queue operations that did not fit into this request
 * @return a flux of per-operation bulk responses
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {
    if (operations.isEmpty()) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }
    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);
    // Anything that did not fit into this batch request goes back into the group pipeline.
    if (!serverOperationBatchRequest.getBatchPendingOperations().isEmpty()) {
        serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
    }
    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
/**
 * Executes one server batch request and fans its per-item results out through
 * {@code handleTransactionalBatchOperationResult}. If the whole request fails with an Exception,
 * every contained operation is routed through {@code handleTransactionalBatchExecutionException}
 * instead; non-Exception Throwables (Errors) are propagated.
 *
 * @param serverRequest the batch request targeting one partition key range
 * @param groupSink     sink used to re-queue operations that should be retried
 * @param thresholds    per-partition thresholds for retry/threshold bookkeeping
 * @return a flux of per-operation bulk responses
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {
            // Only Exceptions get per-operation failure handling; Errors are rethrown as-is.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }
            Exception exception = (Exception) throwable;
            // The whole request failed - apply exception handling to every contained operation.
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
/**
 * Converts one per-item result of a batch response into a bulk operation response.
 * Failed items are run through their retry policy: on a retry decision the item is re-queued
 * (possibly after a back-off) and nothing is emitted yet; otherwise the failure response is
 * emitted to the caller. Successful items feed the partition's threshold heuristics.
 *
 * @param response        the enclosing batch response
 * @param operationResult the per-item result being handled
 * @param groupSink       sink used to re-queue the item when a retry is needed
 * @param thresholds      per-partition thresholds (success/retry bookkeeping)
 * @return a Mono emitting the bulk response, or empty when the item was enqueued for retry
 * @throws UnsupportedOperationException if a failed operation is not an {@code ItemBulkOperation}
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);
    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());
    if (!operationResult.isSuccessStatusCode()) {
        if (itemOperation instanceof ItemBulkOperation<?, ?>) {
            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {
                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // CONFLICT / PRECONDITION_FAILED are expected business outcomes -> debug;
                        // any other terminal failure is logged as an error.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }
                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });
        } else {
            // Only ItemBulkOperation carries a retry policy; anything else is unexpected here.
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }
    // Success path: feed the adaptive batch-size heuristics and emit the response.
    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
/**
 * Returns the per-operation context when the operation is an {@code ItemBulkOperation} carrying
 * one; otherwise falls back to the batch-scoped context.
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (!(itemOperation instanceof ItemBulkOperation<?, ?>)) {
        return this.batchContext;
    }
    TContext operationContext = ((ItemBulkOperation<?, ?>) itemOperation).getContext();
    return operationContext != null ? operationContext : this.batchContext;
}
/**
 * Handles a request-level Exception for one operation of a failed batch request.
 * Gone/split-style failures (per the retry policy's {@code shouldRetryForGone}) are replayed
 * through the main sink so the partition key range gets re-resolved; other CosmosExceptions are
 * delegated to {@code retryOtherExceptions}. Anything else is surfaced as a failed bulk response.
 *
 * @param itemOperation the operation that was part of the failed request
 * @param exception     the request-level failure
 * @param groupSink     sink used for same-partition retries
 * @param thresholds    per-partition thresholds for retry bookkeeping
 * @return a Mono emitting the failure response, or empty when the operation was re-queued
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());
    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    // Re-emit on the main sink so the operation's PK range is resolved again.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }
    // Non-Cosmos exceptions (or unknown operation types): no retry, surface the failure.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-queues an operation on its partition's group sink, immediately when no back-off is
 * requested, otherwise after the given delay. Always records the retry against the partition's
 * thresholds and returns an empty Mono (nothing is emitted to the caller yet).
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {
    thresholds.recordEnqueuedRetry();
    // No back-off requested: re-queue right away.
    if (backOffTime == null || backOffTime.isZero()) {
        groupSink.next(itemOperation);
        return Mono.empty();
    }
    // Otherwise delay the re-queue by the requested back-off.
    return Mono
        .delay(backOffTime)
        .flatMap((delayTick) -> {
            groupSink.next(itemOperation);
            return Mono.empty();
        });
}
/**
 * Consults the operation's retry policy for a non-gone CosmosException: either re-queues the
 * operation (with the policy's back-off) or emits a failed bulk response carrying the original
 * exception.
 */
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {
    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
        if (!result.shouldRetry) {
            // Terminal failure - surface the exception to the caller.
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
        return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
    });
}
/**
 * Executes one partition-key-range server batch request against the service, wiring up request
 * options (throughput control group, custom headers, operation listener) and tracing.
 *
 * Content-response-on-write: even when disabled client-wide, it is enabled for this request if
 * any contained operation is a READ or explicitly opted in via its request options.
 *
 * @param serverRequest the batch request to execute
 * @return a Mono emitting the batch response
 */
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());
    // Propagate custom headers configured on the bulk execution options.
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for (Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);
    // Opt into content responses when any operation needs one (READ or explicit per-op setting).
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        serverRequest.getOperations().size() > 0) {
        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {
                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }
    // Execute inside a tracing context so the batch shows up in diagnostics spans.
    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);
        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
/**
 * Broadcasts the flush marker to every partition group so each one drains its current
 * micro batch. Invoked periodically by the scheduled executor.
 */
private void onFlush() {
    try {
        final CosmosItemOperation flushOperation = FlushBuffersItemOperation.singleton();
        this.groupSinks.forEach(sink -> sink.next(flushOperation));
    } catch (Throwable t) {
        // Deliberately broad catch: a failing flush callback must never kill the scheduler thread.
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
/**
 * Renders a compact diagnostic description of an operation (type, partition key, id) for log
 * messages. The synthetic flush marker gets a fixed description.
 */
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }
    final String partitionKeyText =
        operation.getPartitionKeyValue() != null ? operation.getPartitionKeyValue().toString() : "n/a";
    return "ItemOperation[Type: " + operation.getOperationType().toString()
        + ", PK: " + partitionKeyText
        + ", id: " + operation.getId()
        + "]";
}
/**
 * Renders the current thread's name, group, daemon flag and id for log correlation.
 */
private static String getThreadInfo() {
    final Thread current = Thread.currentThread();
    final String groupName =
        current.getThreadGroup() != null ? current.getThreadGroup().getName() : "n/a";
    return "Thread[Name: " + current.getName()
        + ",Group: " + groupName
        + ", isDaemon: " + current.isDaemon()
        + ", Id: " + current.getId()
        + "]";
}
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {
@Override
public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
if (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {
logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
return true;
}
logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
return false;
}
}
}
class BulkExecutor<TContext> implements Disposable {
// Class-wide logger shared by all executor instances.
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
// Monotonic counter used to build a unique per-instance identifier.
private final static AtomicLong instanceCount = new AtomicLong(0);
// Target container and its low-level document client.
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
// Precomputed "identifier[context]" prefix embedded in every log message.
private final String operationContextText;
// Optional operation context/listener supplied via the bulk execution options.
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
// Caller-provided source of operations to execute.
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
// Steady-state flush interval for micro batches, in milliseconds.
private final Long maxMicroBatchIntervalInMs;
// Batch-scoped fallback context used when an operation carries none of its own.
private final TContext batchContext;
// Per-partition-key-range adaptive batching thresholds, shared via the options' thresholds state.
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
// Set once the input flux completes; combined with totalCount to detect overall completion.
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
// Number of operations accepted from the source but not yet answered.
private final AtomicInteger totalCount;
// Retries FAIL_NON_SERIALIZED emissions on the serialized main sink.
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
// Main sink used to replay operations that need PK-range re-resolution (e.g. after splits).
private final Sinks.Many<CosmosItemOperation> mainSink;
// One sink per partition group; used by onFlush/shutdown to reach every group.
private final List<FluxSink<CosmosItemOperation>> groupSinks;
// Single daemon-thread scheduler driving periodic micro-batch flushes.
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
// Current flush schedule; replaced with a tighter one after the input flux drains.
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor for the given container and operation source.
 *
 * @param container        the target container; must not be null
 * @param inputOperations  the flux of operations to execute; must not be null
 * @param cosmosBulkOptions bulk execution options (batch sizes, intervals, contexts); must not be null
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {
    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");
    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();
    // Snapshot option values once; they do not change over the executor's lifetime.
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);
    // Build the log prefix from the instance identifier plus the optional operation context.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier + "[n/a]";
    }
    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();
    // Single daemon-thread scheduler; cancelled tasks are removed eagerly and no delayed tasks
    // run after shutdown.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    // Kick off the steady-state periodic flush of micro batches.
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);
    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
/**
 * Disposes this executor. Idempotent: only the first caller acts. When no operations are in
 * flight all sinks are completed immediately; otherwise the executor is shut down.
 */
@Override
public void dispose() {
    // First caller wins; later dispose() calls are no-ops.
    if (!this.isDisposed.compareAndSet(false, true)) {
        return;
    }
    if (totalCount.get() == 0) {
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
/** Reports whether {@link #dispose()} has been invoked. */
@Override
public boolean isDisposed() {
    return isDisposed.get();
}
/**
 * Cancels the currently scheduled periodic flush task, if any. Works on a snapshot of the
 * reference because another thread may re-assign {@code scheduledFutureForFlush} concurrently.
 *
 * Improvement: the warn-level log format read {@code "...scheduled tasks{}"} (no separator before
 * the placeholder), which rendered the thread info glued to the word "tasks"; a space was added.
 */
private void cancelFlushTask() {
    ScheduledFuture<?> scheduledFutureSnapshot = this.scheduledFutureForFlush;
    if (scheduledFutureSnapshot != null) {
        try {
            scheduledFutureSnapshot.cancel(true);
            logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to cancel scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText, e);
        }
    }
}
/**
 * Shuts the executor down exactly once: completes every group sink, cancels the flush schedule,
 * and stops the scheduler thread pool.
 */
private void shutdown() {
    // Only the first caller performs the teardown.
    if (!this.isShutdown.compareAndSet(false, true)) {
        return;
    }
    logger.debug("Shutting down, Context: {}", this.operationContextText);
    for (FluxSink<CosmosItemOperation> groupSink : groupSinks) {
        groupSink.complete();
    }
    logger.debug("All group sinks completed, Context: {}", this.operationContextText);
    this.cancelFlushTask();
    try {
        logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
        this.executorService.shutdownNow();
        logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
    } catch (Exception e) {
        logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
    }
}
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
return this
.executeCore()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
this.totalCount.get(),
this.operationContextText,
getThreadInfo());
} else {
int itemsLeftSnapshot = this.totalCount.get();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
}
}
this.dispose();
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
return
maxConcurrentCosmosPartitionsMono
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(maxConcurrentCosmosPartitions -> {
logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
maxConcurrentCosmosPartitions,
this.operationContextText);
return this.inputOperations
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.onErrorContinue((throwable, o) ->
logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
o,
throwable.getMessage(),
this.operationContextText))
.doOnNext((CosmosItemOperation cosmosItemOperation) -> {
BulkExecutorUtil.setRetryPolicyForBulk(
docClientWrapper,
this.container,
cosmosItemOperation,
this.throttlingRetryOptions);
if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
totalCount.incrementAndGet();
}
logger.trace(
"SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
getItemOperationDiagnostics(cosmosItemOperation),
totalCount.get(),
this.operationContextText,
getThreadInfo()
);
})
.doOnComplete(() -> {
mainSourceCompleted.set(true);
long totalCountSnapshot = totalCount.get();
logger.debug("Main source completed -
totalCountSnapshot,
this.operationContextText);
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.cancelFlushTask();
this.onFlush();
long flushIntervalAfterDrainingIncomingFlux = Math.min(
this.maxMicroBatchIntervalInMs,
BatchRequestResponseConstants
.DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
flushIntervalAfterDrainingIncomingFlux,
flushIntervalAfterDrainingIncomingFlux,
TimeUnit.MILLISECONDS);
logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
}
})
.mergeWith(mainSink.asFlux())
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap(
operation -> {
logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
getItemOperationDiagnostics(operation),
this.operationContextText,
getThreadInfo());
return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
.map((String pkRangeId) -> {
PartitionScopeThresholds partitionScopeThresholds =
this.partitionScopeThresholds.computeIfAbsent(
pkRangeId,
(newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
getItemOperationDiagnostics(operation),
pkRangeId,
this.operationContextText,
getThreadInfo());
return Pair.of(partitionScopeThresholds, operation);
});
})
.groupBy(Pair::getKey, Pair::getValue)
.flatMap(
this::executePartitionedGroup,
maxConcurrentCosmosPartitions)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.doOnNext(requestAndResponse -> {
int totalCountAfterDecrement = totalCount.decrementAndGet();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
getItemOperationDiagnostics(requestAndResponse.getOperation()),
totalCountAfterDecrement,
this.operationContextText,
getThreadInfo());
completeAllSinks();
} else {
if (totalCountAfterDecrement == 0) {
logger.debug(
"No Work left - but mainSource not yet completed, Context: {} {}",
this.operationContextText,
getThreadInfo());
}
logger.trace(
"Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
totalCountAfterDecrement,
mainSourceCompletedSnapshot,
getItemOperationDiagnostics(requestAndResponse.getOperation()),
this.operationContextText,
getThreadInfo());
}
})
.doOnComplete(() -> {
int totalCountSnapshot = totalCount.get();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
completeAllSinks();
} else {
logger.debug(
"DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
totalCountSnapshot,
mainSourceCompletedSnapshot,
this.operationContextText,
getThreadInfo());
}
});
});
}
/**
 * Processes all operations targeting one physical partition (the group key is that partition's
 * {@link PartitionScopeThresholds}): operations are buffered into micro batches and flushed to the
 * server when the batch is large enough, old enough, its serialized payload approaches the request
 * size limit, or a flush marker arrives.
 *
 * A dedicated serialized group sink is created so that retries and flush markers can be re-queued
 * into this partition's pipeline; the sink is registered in {@code groupSinks} so onFlush/shutdown
 * can reach it.
 *
 * @param partitionedGroupFluxOfInputOperations the grouped flux of operations for one partition
 * @return a flux of per-operation bulk responses for this partition
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {
    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();
    // Serialized per-group processor/sink used to re-enqueue retries and flush markers.
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);
    // Micro-batch accounting: timestamp of the oldest buffered record (-1 == buffer empty),
    // number of buffered operations, and their accumulated serialized payload size.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);
    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());
            // A flush marker cuts the buffer only when it actually holds operations.
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);
                    return true;
                }
                return false;
            }
            // Record when the first operation of the current micro batch was enqueued.
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);
            // Cut the batch on any of the three limits: size, age, or payload bytes.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }
            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                // Strip flush markers; they only exist to trigger the buffer cut above.
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {
                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }
                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());
                return executeOperations(operations, thresholds, groupSink);
            },
            // Concurrency of in-flight micro batches per partition comes from the bulk options.
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds the serialized length of {@code item} to the running total and returns the new total.
 * Operations that are not {@link CosmosItemOperationBase} expose no serialized length and leave
 * the accumulator unchanged.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        return currentTotalSerializedLength.get();
    }
    int incrementalLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.addAndGet(incrementalLength);
}
// Turns one flushed micro batch for a partition key range into a server batch request and
// executes it. Operations the request factory reports as pending (i.e. not included in this
// request) are re-emitted into the group sink so they get picked up by a later micro batch.
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {

    // A buffer can consist solely of flush markers, which are filtered out upstream.
    if (operations.isEmpty()) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }

    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);

    // Requeue everything that did not fit into this request.
    if (!serverOperationBatchRequest.getBatchPendingOperations().isEmpty()) {
        serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
    }

    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
// Executes one server batch request against a single partition key range and fans the
// outcome out into per-operation results:
// - on success, every CosmosBatchOperationResult is translated (and possibly re-enqueued
//   for retry) by handleTransactionalBatchOperationResult;
// - on a request-level failure, the same exception is applied to every operation of the
//   request via handleTransactionalBatchExecutionException, which decides retry vs. failure.
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {
            // Only Exceptions are handled here; Errors are rethrown untouched.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }
            Exception exception = (Exception) throwable;
            // Apply the request-level failure to each operation contained in the request.
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
// Translates a single CosmosBatchOperationResult of a server batch response into a
// per-operation bulk response, or re-enqueues the operation for retry.
// Successful operations are recorded against the partition thresholds; failed operations
// are passed to the operation's retry policy, which decides between a (possibly delayed)
// retry via the group sink and a terminal failure response returned to the caller.
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);

    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (!operationResult.isSuccessStatusCode()) {
        if (itemOperation instanceof ItemBulkOperation<?, ?>) {
            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {
                        // Retry policy asked for a retry - re-enqueue into the group sink,
                        // honoring the policy's back-off.
                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // Terminal failure. Conflicts (409) and precondition failures (412)
                        // are logged at debug only; every other terminal failure is an error.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }
                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });
        } else {
            // Only ItemBulkOperation instances carry a retry policy; anything else is unexpected.
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }

    // Success: feed the thresholds so the target micro-batch size can be tuned again.
    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
// Returns the per-operation context when the operation carries one; otherwise falls back
// to the batch-wide context.
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (itemOperation instanceof ItemBulkOperation<?, ?>) {
        TContext operationContext = ((ItemBulkOperation<?, ?>) itemOperation).getContext();
        if (operationContext != null) {
            return operationContext;
        }
    }

    return this.batchContext;
}
// Handles a request-level failure (the whole server batch request failed) for one
// operation of that request:
// - gone-style failures (partition split/move, per shouldRetryForGone) are re-emitted into
//   the main sink so the partition key range gets re-resolved before the next attempt;
// - other CosmosExceptions are delegated to retryOtherExceptions, which consults the
//   operation's retry policy;
// - anything else terminally fails the operation with the original exception.
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;

        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    // Re-route through the main pipeline so the PK range is resolved again;
                    // no response is emitted for this attempt.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }

    // No retry path available - surface the exception as the operation's result.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
// Re-enqueues an operation into its group sink for another attempt, honoring an optional
// back-off delay. Always resolves to an empty Mono - the operation's eventual response is
// produced by the retried execution, not by this call.
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {

    // Count the retry against the partition's thresholds before re-queueing.
    thresholds.recordEnqueuedRetry();

    if (backOffTime != null && !backOffTime.isZero()) {
        // Re-emit into the group only after the back-off delay elapsed.
        return Mono
            .delay(backOffTime)
            .flatMap((delayCompletion) -> {
                groupSink.next(itemOperation);
                return Mono.empty();
            });
    }

    // No back-off requested - re-queue immediately.
    groupSink.next(itemOperation);
    return Mono.empty();
}
// Consults the operation's retry policy for non-gone CosmosExceptions: either re-enqueues
// the operation into its group sink (honoring the policy's back-off) or surfaces a terminal
// failure response carrying the original exception and the operation's context.
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {

    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
        if (result.shouldRetry) {
            return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
        } else {
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
    });
}
// Builds the RequestOptions for a partition-key-range server batch request and executes it
// through the document client, wrapped in a tracing context.
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());

    // Propagate any custom headers configured on the bulk execution options.
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for (Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);

    // Even when the client disables content-response-on-write globally, content must be
    // re-enabled for this request if it contains any READ operation or any operation that
    // explicitly opted back in via its own request options.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        !serverRequest.getOperations().isEmpty()) {

        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {
                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }

    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);

        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
// Periodic flush callback: emits a flush marker into every group sink so partially filled
// micro batches get cut even when no new operations arrive. Never lets a failure escape
// into the scheduler thread.
private void onFlush() {
    try {
        for (FluxSink<CosmosItemOperation> groupSink : this.groupSinks) {
            groupSink.next(FlushBuffersItemOperation.singleton());
        }
    } catch (Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
// Renders a compact diagnostic string for an operation (type, partition key, id) used in
// log output. The flush marker gets a fixed representation.
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }

    String pkText = operation.getPartitionKeyValue() != null
        ? operation.getPartitionKeyValue().toString()
        : "n/a";

    return "ItemOperation[Type: "
        + operation.getOperationType().toString()
        + ", PK: "
        + pkText
        + ", id: "
        + operation.getId()
        + "]";
}
// Renders name/group/daemon-flag/id of the current thread for log output.
private static String getThreadInfo() {
    Thread current = Thread.currentThread();
    String groupName = current.getThreadGroup() != null ? current.getThreadGroup().getName() : "n/a";

    return "Thread[Name: "
        + current.getName()
        + ",Group: "
        + groupName
        + ", isDaemon: "
        + current.isDaemon()
        + ", Id: "
        + current.getId()
        + "]";
}
// Emit-failure handler for the serialized main sink: retries emissions that failed only
// because another thread was emitting concurrently (FAIL_NON_SERIALIZED) and gives up on
// every other failure.
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {

    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        if (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {
            // Concurrent emitters raced - instruct the sink to retry the emission.
            logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
            return true;
        }

        // Terminated/cancelled/overflow etc. are not retriable - log and drop.
        logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        return false;
    }
}
} |
No — keeping line 862 at `info` level is helpful, because it aids debugging in scenarios where some ingestion tasks take a long time (distinguishing a hang from a long-running task). | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.info(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED || | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.warn(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
private final Long maxMicroBatchIntervalInMs;
private final TContext batchContext;
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;
private final List<FluxSink<CosmosItemOperation>> groupSinks;
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
// Wires up a new bulk executor for the given container and input operation stream:
// resolves client handles, reads all knobs from the bulk execution options, creates the
// main sink plus the retry/flush plumbing and schedules the periodic flush task that
// emits FlushBuffersItemOperation markers.
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {

    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");

    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();

    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);

    // The operation context (when present) is baked into every log line via operationContextText.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier + "[n/a]";
    }

    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();

    // A single daemon thread drives the periodic flush of partially filled micro batches.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);

    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
// Disposes the executor exactly once. With no operations in flight the sinks are completed
// immediately; otherwise only the teardown (shutdown) path runs.
@Override
public void dispose() {
    // Only the first caller transitions the executor into the disposed state.
    if (!this.isDisposed.compareAndSet(false, true)) {
        return;
    }

    if (totalCount.get() == 0) {
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
// Reports whether dispose() has already been invoked on this executor.
@Override
public boolean isDisposed() {
    return this.isDisposed.get();
}
// Cancels the currently scheduled periodic flush task, if any. Reads the field once into a
// local because it is reassigned elsewhere (a new flush task is scheduled when the main
// source completes).
private void cancelFlushTask() {
    ScheduledFuture<?> flushTaskSnapshot = this.scheduledFutureForFlush;
    if (flushTaskSnapshot == null) {
        return;
    }

    try {
        flushTaskSnapshot.cancel(true);
        logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
    } catch (Exception e) {
        logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
    }
}
// Idempotent teardown: completes all group sinks, cancels the periodic flush task and
// stops the scheduler thread. Only the first call performs the work.
private void shutdown() {
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);

        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);
        this.cancelFlushTask();

        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            // shutdownNow: pending flush tasks are not needed once the sinks are completed.
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
return this
.executeCore()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
this.totalCount.get(),
this.operationContextText,
getThreadInfo());
} else {
int itemsLeftSnapshot = this.totalCount.get();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
}
}
this.dispose();
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
return
maxConcurrentCosmosPartitionsMono
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(maxConcurrentCosmosPartitions -> {
logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
maxConcurrentCosmosPartitions,
this.operationContextText);
return this.inputOperations
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.onErrorContinue((throwable, o) ->
logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
o,
throwable.getMessage(),
this.operationContextText))
.doOnNext((CosmosItemOperation cosmosItemOperation) -> {
BulkExecutorUtil.setRetryPolicyForBulk(
docClientWrapper,
this.container,
cosmosItemOperation,
this.throttlingRetryOptions);
if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
totalCount.incrementAndGet();
}
logger.trace(
"SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
getItemOperationDiagnostics(cosmosItemOperation),
totalCount.get(),
this.operationContextText,
getThreadInfo()
);
})
.doOnComplete(() -> {
mainSourceCompleted.set(true);
long totalCountSnapshot = totalCount.get();
logger.debug("Main source completed -
totalCountSnapshot,
this.operationContextText);
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.cancelFlushTask();
this.onFlush();
long flushIntervalAfterDrainingIncomingFlux = Math.min(
this.maxMicroBatchIntervalInMs,
BatchRequestResponseConstants
.DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
flushIntervalAfterDrainingIncomingFlux,
flushIntervalAfterDrainingIncomingFlux,
TimeUnit.MILLISECONDS);
logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
}
})
.mergeWith(mainSink.asFlux())
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap(
operation -> {
logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
getItemOperationDiagnostics(operation),
this.operationContextText,
getThreadInfo());
return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
.map((String pkRangeId) -> {
PartitionScopeThresholds partitionScopeThresholds =
this.partitionScopeThresholds.computeIfAbsent(
pkRangeId,
(newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
getItemOperationDiagnostics(operation),
pkRangeId,
this.operationContextText,
getThreadInfo());
return Pair.of(partitionScopeThresholds, operation);
});
})
.groupBy(Pair::getKey, Pair::getValue)
.flatMap(
this::executePartitionedGroup,
maxConcurrentCosmosPartitions)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.doOnNext(requestAndResponse -> {
int totalCountAfterDecrement = totalCount.decrementAndGet();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
getItemOperationDiagnostics(requestAndResponse.getOperation()),
totalCountAfterDecrement,
this.operationContextText,
getThreadInfo());
completeAllSinks();
} else {
if (totalCountAfterDecrement == 0) {
logger.debug(
"No Work left - but mainSource not yet completed, Context: {} {}",
this.operationContextText,
getThreadInfo());
}
logger.trace(
"Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
totalCountAfterDecrement,
mainSourceCompletedSnapshot,
getItemOperationDiagnostics(requestAndResponse.getOperation()),
this.operationContextText,
getThreadInfo());
}
})
.doOnComplete(() -> {
int totalCountSnapshot = totalCount.get();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
completeAllSinks();
} else {
logger.debug(
"DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
totalCountSnapshot,
mainSourceCompletedSnapshot,
this.operationContextText,
getThreadInfo());
}
});
});
}
// Processes all operations destined for one partition key range: merges the group's input
// with its retry sink, buffers operations into micro batches and executes each batch.
// A micro batch is cut when any of these triggers fires:
//   - batch size reached the partition's current target micro-batch size,
//   - age of the oldest buffered operation exceeded maxMicroBatchIntervalInMs,
//   - accumulated serialized payload reached the direct-mode request body limit,
//   - a FlushBuffersItemOperation marker arrived while the buffer is non-empty.
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {

    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();

    // Per-group serialized retry sink; registered so onFlush() can reach every group.
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);

    // Mutable buffer state shared by the bufferUntil predicate invocations.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);

    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());

            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());

                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);

                    return true;
                }

                // Nothing buffered - swallow the flush marker without cutting a batch.
                return false;
            }

            // Remember the timestamp of the first operation in the current buffer.
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);

            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());

                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);

                return true;
            }
            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {

                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    // Flush markers are control records only - never sent to the service.
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }

                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());

                return executeOperations(operations, thresholds, groupSink);
            },
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
if (item instanceof CosmosItemOperationBase) {
return currentTotalSerializedLength.accumulateAndGet(
((CosmosItemOperationBase) item).getSerializedLength(),
(currentValue, incremental) -> currentValue + incremental);
}
return currentTotalSerializedLength.get();
}
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
List<CosmosItemOperation> operations,
PartitionScopeThresholds thresholds,
FluxSink<CosmosItemOperation> groupSink) {
if (operations.size() == 0) {
logger.trace("Empty operations list, Context: {}", this.operationContextText);
return Flux.empty();
}
String pkRange = thresholds.getPartitionKeyRangeId();
ServerOperationBatchRequest serverOperationBatchRequest =
BulkExecutorUtil.createBatchRequest(operations, pkRange);
if (serverOperationBatchRequest.getBatchPendingOperations().size() > 0) {
serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
}
return Flux.just(serverOperationBatchRequest.getBatchRequest())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
PartitionKeyRangeServerBatchRequest serverRequest,
FluxSink<CosmosItemOperation> groupSink,
PartitionScopeThresholds thresholds) {
return this.executeBatchRequest(serverRequest)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(response ->
Flux
.fromIterable(response.getResults())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((CosmosBatchOperationResult result) ->
handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
.onErrorResume((Throwable throwable) -> {
if (!(throwable instanceof Exception)) {
throw Exceptions.propagate(throwable);
}
Exception exception = (Exception) throwable;
return Flux
.fromIterable(serverRequest.getOperations())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((CosmosItemOperation itemOperation) ->
handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
});
}
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
CosmosBatchResponse response,
CosmosBatchOperationResult operationResult,
FluxSink<CosmosItemOperation> groupSink,
PartitionScopeThresholds thresholds) {
CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
.createCosmosBulkItemResponse(operationResult, response);
CosmosItemOperation itemOperation = operationResult.getOperation();
TContext actualContext = this.getActualContext(itemOperation);
logger.debug(
"HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
"Operation Status Code, {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
if (!operationResult.isSuccessStatusCode()) {
if (itemOperation instanceof ItemBulkOperation<?, ?>) {
ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
result -> {
if (result.shouldRetry) {
logger.debug(
"HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
"Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
} else {
if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
logger.debug(
"HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
"Code {}, Operation Status Code {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
} else {
logger.error(
"HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
"Code {}, Operation Status Code {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
}
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
itemOperation, cosmosBulkItemResponse, actualContext));
}
});
} else {
throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
}
}
thresholds.recordSuccessfulOperation();
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
itemOperation,
cosmosBulkItemResponse,
actualContext));
}
/**
 * Resolves the caller-supplied context for a single operation: the
 * per-operation context attached to an {@link ItemBulkOperation} wins when
 * present; otherwise the batch-scoped context is used as the fallback.
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (itemOperation instanceof ItemBulkOperation<?, ?>) {
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        TContext operationContext = itemBulkOperation.getContext();
        if (operationContext != null) {
            return operationContext;
        }
    }

    // No operation-scoped context available - fall back to the batch-wide one.
    return this.batchContext;
}
/**
 * Handles an exception thrown while executing a whole server batch request
 * (as opposed to a per-operation failure inside a successful response).
 *
 * For a {@link CosmosException} on an {@link ItemBulkOperation} the retry
 * policy is consulted twice: first for GONE/partition-split conditions (the
 * operation is re-emitted on the main sink so its partition key range gets
 * re-resolved), then for all other retriable conditions via
 * {@code retryOtherExceptions}. Anything else is surfaced to the caller as a
 * failed {@link CosmosBulkOperationResponse}.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;

        // Check GONE/split first: those must go back through partition key
        // range resolution on the main sink, not the per-partition group sink.
        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());

                    // Re-enter the pipeline from the top so the new owning
                    // partition key range is resolved for this operation.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());

                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }

    // Non-retriable exception type - report the failure to the caller.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-queues an operation on its partition group sink for another attempt,
 * optionally after the back-off requested by the retry policy. Always
 * completes empty - the retried operation produces its response later.
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {

    thresholds.recordEnqueuedRetry();

    if (backOffTime != null && !backOffTime.isZero()) {
        // Honor the requested back-off before feeding the operation back in.
        return Mono
            .delay(backOffTime)
            .flatMap((Long ignored) -> {
                groupSink.next(itemOperation);
                return Mono.empty();
            });
    }

    // No back-off requested - re-queue immediately.
    groupSink.next(itemOperation);
    return Mono.empty();
}
/**
 * Consults the operation's retry policy for a non-GONE {@link CosmosException}
 * and either re-queues the operation (possibly delayed) or surfaces the
 * exception as a failed response for the caller.
 */
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {

    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(retryResult -> {
        if (!retryResult.shouldRetry) {
            // Retries exhausted or not retriable - report the failure.
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
        return this.enqueueForRetry(retryResult.backOffTime, groupSink, itemBulkOperation, thresholds);
    });
}
/**
 * Sends one partition-key-range-scoped server batch request.
 *
 * Builds the request options (throughput control group, custom headers,
 * operation listener), decides whether content-response-on-write must be
 * forced on despite being disabled client-wide, and wraps the response
 * publisher with the tracer for diagnostics.
 */
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());

    // Propagate any custom request headers configured on the bulk options.
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for(Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);

    // Even when the client disables content-response-on-write globally, it has
    // to be enabled for this batch if any contained operation is a READ (which
    // needs the payload) or explicitly opted in via its own request options.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        serverRequest.getOperations().size() > 0) {

        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {

                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }

    // Execute and wrap the response publisher for tracing.
    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);

        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
/**
 * Pushes a flush marker into every partition group sink so partially filled
 * micro batches get dispatched even when no new operations arrive. Failures
 * are logged and swallowed - this runs on the scheduled flush executor.
 */
private void onFlush() {
    try {
        CosmosItemOperation flushMarker = FlushBuffersItemOperation.singleton();
        for (FluxSink<CosmosItemOperation> sink : this.groupSinks) {
            sink.next(flushMarker);
        }
    } catch(Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
/**
 * Renders a compact, human-readable description of an operation (type,
 * partition key, id) for log messages; flush markers get a fixed label.
 */
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }

    String partitionKeyText = operation.getPartitionKeyValue() != null
        ? operation.getPartitionKeyValue().toString()
        : "n/a";

    return "ItemOperation[Type: "
        + operation.getOperationType().toString()
        + ", PK: "
        + partitionKeyText
        + ", id: "
        + operation.getId()
        + "]";
}
/**
 * Renders diagnostics about the current thread (name, group, daemon flag, id)
 * for correlating log entries across scheduler hops.
 */
private static String getThreadInfo() {
    Thread current = Thread.currentThread();
    String groupName = current.getThreadGroup() != null ? current.getThreadGroup().getName() : "n/a";

    return "Thread[" + "Name: "
        + current.getName()
        + ",Group: "
        + groupName
        + ", isDaemon: "
        + current.isDaemon()
        + ", Id: "
        + current.getId()
        + "]";
}
/**
 * Emit-failure handler for the serialized main sink: emissions that failed
 * only because concurrent callers raced on the sink (FAIL_NON_SERIALIZED) are
 * retried; every other failure is logged as an error and given up on.
 */
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {

    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        boolean isNonSerializedFailure = emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED);
        if (isNonSerializedFailure) {
            // Benign race on the serialized sink - retrying is expected to succeed.
            logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        } else {
            logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        }
        return isNonSerializedFailure;
    }
}
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
private final Long maxMicroBatchIntervalInMs;
private final TContext batchContext;
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;
private final List<FluxSink<CosmosItemOperation>> groupSinks;
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor for the given container and input operation flux.
 *
 * Wires up the retry options, per-partition thresholds, optional operation
 * listener, the serialized main sink used for retries, and a single-threaded
 * daemon scheduler that periodically flushes partially filled micro batches.
 *
 * @param container        target container; must not be null.
 * @param inputOperations  flux of operations to execute; must not be null.
 * @param cosmosBulkOptions execution options (batch sizing, thresholds, ...); must not be null.
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {

    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");

    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());

    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();

    // Flush cadence while the input flux is still producing operations.
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    // Per-partition micro-batch sizing state, shared via the thresholds-state
    // accessor so it can survive across executor instances.
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);
    // Correlation text used in every log entry emitted by this executor.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier +"[n/a]";
    }

    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    // Unicast sink used to re-inject retried operations into the pipeline.
    mainSink =  Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();

    // Single daemon thread periodically flushes partially filled micro batches.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);

    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
/**
 * Disposes the executor exactly once. When no operations are pending the
 * sinks are completed gracefully; otherwise the executor is shut down and
 * in-flight work is abandoned.
 */
@Override
public void dispose() {
    boolean transitionedToDisposed = this.isDisposed.compareAndSet(false, true);
    if (!transitionedToDisposed) {
        return;
    }

    if (totalCount.get() == 0) {
        completeAllSinks();
    } else {
        this.shutdown();
    }
}
/**
 * Indicates whether {@link #dispose()} has already been invoked.
 */
@Override
public boolean isDisposed() {
    return this.isDisposed.get();
}
/**
 * Best-effort cancellation of the currently scheduled periodic flush task.
 * Failures are logged but never propagated - this runs on shutdown and on
 * flush-cadence rescheduling paths.
 */
private void cancelFlushTask() {
    // Work on a snapshot: the field is re-assigned when a new flush cadence
    // is scheduled after the input flux completes.
    ScheduledFuture<?> scheduledFutureSnapshot = this.scheduledFutureForFlush;

    if (scheduledFutureSnapshot != null) {
        try {
            scheduledFutureSnapshot.cancel(true);
            logger.debug("Cancelled all future scheduled  tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
        }
    }
}
/**
 * Shuts the executor down exactly once: completes all partition group sinks,
 * cancels the periodic flush task, and stops the flush executor service.
 * Exceptions from the executor-service shutdown are logged, not rethrown.
 */
private void shutdown() {
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);

        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);

        this.cancelFlushTask();

        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
return this
.executeCore()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
this.totalCount.get(),
this.operationContextText,
getThreadInfo());
} else {
int itemsLeftSnapshot = this.totalCount.get();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
}
}
this.dispose();
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
return
maxConcurrentCosmosPartitionsMono
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(maxConcurrentCosmosPartitions -> {
logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
maxConcurrentCosmosPartitions,
this.operationContextText);
return this.inputOperations
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.onErrorContinue((throwable, o) ->
logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
o,
throwable.getMessage(),
this.operationContextText))
.doOnNext((CosmosItemOperation cosmosItemOperation) -> {
BulkExecutorUtil.setRetryPolicyForBulk(
docClientWrapper,
this.container,
cosmosItemOperation,
this.throttlingRetryOptions);
if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
totalCount.incrementAndGet();
}
logger.trace(
"SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
getItemOperationDiagnostics(cosmosItemOperation),
totalCount.get(),
this.operationContextText,
getThreadInfo()
);
})
.doOnComplete(() -> {
mainSourceCompleted.set(true);
long totalCountSnapshot = totalCount.get();
logger.debug("Main source completed -
totalCountSnapshot,
this.operationContextText);
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.cancelFlushTask();
this.onFlush();
long flushIntervalAfterDrainingIncomingFlux = Math.min(
this.maxMicroBatchIntervalInMs,
BatchRequestResponseConstants
.DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
flushIntervalAfterDrainingIncomingFlux,
flushIntervalAfterDrainingIncomingFlux,
TimeUnit.MILLISECONDS);
logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
}
})
.mergeWith(mainSink.asFlux())
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap(
operation -> {
logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
getItemOperationDiagnostics(operation),
this.operationContextText,
getThreadInfo());
return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
.map((String pkRangeId) -> {
PartitionScopeThresholds partitionScopeThresholds =
this.partitionScopeThresholds.computeIfAbsent(
pkRangeId,
(newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
getItemOperationDiagnostics(operation),
pkRangeId,
this.operationContextText,
getThreadInfo());
return Pair.of(partitionScopeThresholds, operation);
});
})
.groupBy(Pair::getKey, Pair::getValue)
.flatMap(
this::executePartitionedGroup,
maxConcurrentCosmosPartitions)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.doOnNext(requestAndResponse -> {
int totalCountAfterDecrement = totalCount.decrementAndGet();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
getItemOperationDiagnostics(requestAndResponse.getOperation()),
totalCountAfterDecrement,
this.operationContextText,
getThreadInfo());
completeAllSinks();
} else {
if (totalCountAfterDecrement == 0) {
logger.debug(
"No Work left - but mainSource not yet completed, Context: {} {}",
this.operationContextText,
getThreadInfo());
}
logger.trace(
"Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
totalCountAfterDecrement,
mainSourceCompletedSnapshot,
getItemOperationDiagnostics(requestAndResponse.getOperation()),
this.operationContextText,
getThreadInfo());
}
})
.doOnComplete(() -> {
int totalCountSnapshot = totalCount.get();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
completeAllSinks();
} else {
logger.debug(
"DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
totalCountSnapshot,
mainSourceCompletedSnapshot,
this.operationContextText,
getThreadInfo());
}
});
});
}
/**
 * Processes the operation stream for a single partition key range.
 *
 * Operations (plus retried ones re-injected through the group sink) are
 * buffered into micro batches. A buffer is cut when (a) a flush marker
 * arrives and the buffer is non-empty, (b) the batch reaches the current
 * target size, (c) the oldest buffered record exceeds the max micro-batch
 * interval, or (d) the accumulated serialized payload hits the request body
 * size limit. Each cut buffer is dispatched via {@code executeOperations}.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {

    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();

    // Serialized processor + sink used to re-queue retried/pending operations
    // for this partition; registered so onFlush() can reach it.
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);

    // Mutable buffer state: timestamp of the oldest buffered record, current
    // batch size, and accumulated serialized payload length.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);

    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                // Flush marker: cut the buffer only when it holds real operations.
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());

                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);

                    return true;
                }

                // Nothing buffered - swallow the flush marker.
                return false;
            }

            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);

            // Cut the buffer on size, age or payload-size thresholds.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {

                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }

            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                // Strip flush markers; only real operations go into the batch request.
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {

                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }

                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations,  Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());

                return executeOperations(operations, thresholds, groupSink);
            },
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds the operation's serialized payload length to the running total and
 * returns the new total. Operations that are not {@link CosmosItemOperationBase}
 * (e.g. flush markers) leave the accumulator unchanged.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        return currentTotalSerializedLength.get();
    }

    int serializedLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.addAndGet(serializedLength);
}
/**
 * Builds a server batch request from one micro batch and dispatches it.
 * Operations that did not fit into the request's size limits are pushed back
 * into the group sink so a subsequent micro batch picks them up.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {

    if (operations.isEmpty()) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }

    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);

    // Overflow operations are re-queued for the next micro batch.
    for (CosmosItemOperation pendingOperation : serverOperationBatchRequest.getBatchPendingOperations()) {
        groupSink.next(pendingOperation);
    }

    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
/**
 * Executes one partition-scoped server batch request and fans the batch
 * response out into per-operation responses. On a request-level failure every
 * contained operation goes through the exception/retry handler instead.
 */
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            // Request succeeded - map each per-operation result individually
            // (individual operations may still have failed and need retries).
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {

            // Errors (non-Exception throwables) must keep propagating.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }

            Exception exception = (Exception) throwable;

            // Whole request failed - run every operation through the
            // exception handler so each one can be retried or reported.
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
/**
 * Converts one per-operation result of a batch response into either a caller
 * visible {@link CosmosBulkOperationResponse} or a retry.
 *
 * Successful results record a success on the partition thresholds. Failed
 * results consult the operation's retry policy: retriable failures are
 * re-queued on the group sink (with back-off); terminal failures are logged
 * (DEBUG for expected 409/412 outcomes, ERROR otherwise) and returned.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);

    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (!operationResult.isSuccessStatusCode()) {

        if (itemOperation instanceof ItemBulkOperation<?, ?>) {

            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {
                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // 409 (conflict) and 412 (precondition failed) are expected
                        // application-level outcomes - keep them at DEBUG.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {

                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }

                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });

        } else {
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }

    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
/**
 * Resolves the caller-supplied context for a single operation: the
 * per-operation context attached to an {@link ItemBulkOperation} wins when
 * present; otherwise the batch-scoped context is used as the fallback.
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (itemOperation instanceof ItemBulkOperation<?, ?>) {
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        TContext operationContext = itemBulkOperation.getContext();
        if (operationContext != null) {
            return operationContext;
        }
    }

    // No operation-scoped context available - fall back to the batch-wide one.
    return this.batchContext;
}
/**
 * Handles an exception thrown while executing a whole server batch request
 * (as opposed to a per-operation failure inside a successful response).
 *
 * For a {@link CosmosException} on an {@link ItemBulkOperation} the retry
 * policy is consulted twice: first for GONE/partition-split conditions (the
 * operation is re-emitted on the main sink so its partition key range gets
 * re-resolved), then for all other retriable conditions via
 * {@code retryOtherExceptions}. Anything else is surfaced to the caller as a
 * failed {@link CosmosBulkOperationResponse}.
 */
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {

    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());

    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;

        // Check GONE/split first: those must go back through partition key
        // range resolution on the main sink, not the per-partition group sink.
        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());

                    // Re-enter the pipeline from the top so the new owning
                    // partition key range is resolved for this operation.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());

                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }

    // Non-retriable exception type - report the failure to the caller.
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-queues an operation on its partition group sink for another attempt,
 * optionally after the back-off requested by the retry policy. Always
 * completes empty - the retried operation produces its response later.
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {

    thresholds.recordEnqueuedRetry();

    if (backOffTime != null && !backOffTime.isZero()) {
        // Honor the requested back-off before feeding the operation back in.
        return Mono
            .delay(backOffTime)
            .flatMap((Long ignored) -> {
                groupSink.next(itemOperation);
                return Mono.empty();
            });
    }

    // No back-off requested - re-queue immediately.
    groupSink.next(itemOperation);
    return Mono.empty();
}
/**
 * Consults the operation's retry policy for a non-GONE {@link CosmosException}
 * and either re-queues the operation (possibly delayed) or surfaces the
 * exception as a failed response for the caller.
 */
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {

    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(retryResult -> {
        if (!retryResult.shouldRetry) {
            // Retries exhausted or not retriable - report the failure.
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
        return this.enqueueForRetry(retryResult.backOffTime, groupSink, itemBulkOperation, thresholds);
    });
}
/**
 * Sends one partition-key-range-scoped server batch request.
 *
 * Builds the request options (throughput control group, custom headers,
 * operation listener), decides whether content-response-on-write must be
 * forced on despite being disabled client-wide, and wraps the response
 * publisher with the tracer for diagnostics.
 */
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());

    // Propagate any custom request headers configured on the bulk options.
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        for(Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);

    // Even when the client disables content-response-on-write globally, it has
    // to be enabled for this batch if any contained operation is a READ (which
    // needs the payload) or explicitly opted in via its own request options.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        serverRequest.getOperations().size() > 0) {

        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {

                    options.setContentResponseOnWriteEnabled(true);
                    break;
                }
            }
        }
    }

    // Execute and wrap the response publisher for tracing.
    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);

        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
/**
 * Pushes a flush marker into every partition group sink so partially filled
 * micro batches get dispatched even when no new operations arrive. Failures
 * are logged and swallowed - this runs on the scheduled flush executor.
 */
private void onFlush() {
    try {
        CosmosItemOperation flushMarker = FlushBuffersItemOperation.singleton();
        for (FluxSink<CosmosItemOperation> sink : this.groupSinks) {
            sink.next(flushMarker);
        }
    } catch(Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
/**
 * Renders a compact diagnostic string for an operation, e.g.
 * "ItemOperation[Type: Create, PK: ..., id: ...]"; the flush marker gets a
 * dedicated short form.
 */
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }
    String partitionKeyText = operation.getPartitionKeyValue() != null
        ? operation.getPartitionKeyValue().toString()
        : "n/a";
    return "ItemOperation[Type: "
        + operation.getOperationType().toString()
        + ", PK: "
        + partitionKeyText
        + ", id: "
        + operation.getId()
        + "]";
}
/**
 * Renders diagnostic information about the calling thread (name, group, daemon
 * flag, id) for inclusion in log statements.
 */
private static String getThreadInfo() {
    Thread current = Thread.currentThread();
    ThreadGroup group = current.getThreadGroup();
    return "Thread["
        + "Name: " + current.getName()
        + ",Group: " + (group != null ? group.getName() : "n/a")
        + ", isDaemon: " + current.isDaemon()
        + ", Id: " + current.getId()
        + "]";
}
// Emit-failure handler for the serialized main sink: retries (returns true) only
// on FAIL_NON_SERIALIZED, which indicates a concurrent emission attempt and is
// safe to retry; every other failure is logged as an error and given up on.
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {
    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        if (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {
            logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
            // true -> the sink retries the emission.
            return true;
        }
        logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        return false;
    }
}
} |
I think with moving the high volumn logs to debug we can make this warning | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.info(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | logger.info( | private void completeAllSinks() {
logger.info("Closing all sinks, Context: {}", this.operationContextText);
logger.debug("Executor service shut down, Context: {}", this.operationContextText);
Sinks.EmitResult completeEmitResult = mainSink.tryEmitComplete();
if (completeEmitResult == Sinks.EmitResult.OK) {
logger.debug("Main sink completed, Context: {}", this.operationContextText);
} else {
if (completeEmitResult == Sinks.EmitResult.FAIL_CANCELLED ||
completeEmitResult == Sinks.EmitResult.FAIL_TERMINATED) {
logger.debug("Main sink already completed, EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
} else {
logger.warn(
"Main sink completion failed. EmitResult: {}, Context: {}",
completeEmitResult,
this.operationContextText);
}
}
this.shutdown();
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
private final Long maxMicroBatchIntervalInMs;
private final TContext batchContext;
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;
private final List<FluxSink<CosmosItemOperation>> groupSinks;
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
/**
 * Creates a bulk executor for the given container and operation stream and
 * starts the periodic flush task immediately.
 */
public BulkExecutor(CosmosAsyncContainer container,
                    Flux<CosmosItemOperation> inputOperations,
                    CosmosBulkExecutionOptions cosmosBulkOptions) {
    checkNotNull(container, "expected non-null container");
    checkNotNull(inputOperations, "expected non-null inputOperations");
    checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");
    this.cosmosBulkExecutionOptions = cosmosBulkOptions;
    this.container = container;
    this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
    this.inputOperations = inputOperations;
    this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
    this.cosmosClient = ImplementationBridgeHelpers
        .CosmosAsyncDatabaseHelper
        .getCosmosAsyncDatabaseAccessor()
        .getCosmosAsyncClient(container.getDatabase());
    this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();
    maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
        .toMillis();
    batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
    this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
        .getBulkExecutionThresholdsAccessor()
        .getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
    operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getOperationContext(cosmosBulkExecutionOptions);
    // Bake the caller's operation context (if any) into the log prefix once.
    if (operationListener != null &&
        operationListener.getOperationContext() != null) {
        operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
    } else {
        operationContextText = identifier +"[n/a]";
    }
    mainSourceCompleted = new AtomicBoolean(false);
    totalCount = new AtomicInteger(0);
    serializedEmitFailureHandler = new SerializedEmitFailureHandler();
    mainSink = Sinks.many().unicast().onBackpressureBuffer();
    groupSinks = new CopyOnWriteArrayList<>();
    // Single daemon thread drives the periodic flush; cancelled tasks are removed
    // eagerly and no delayed tasks run after shutdown.
    this.executorService = new ScheduledThreadPoolExecutor(
        1,
        new CosmosDaemonThreadFactory(identifier));
    this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.executorService.setRemoveOnCancelPolicy(true);
    this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
        this::onFlush,
        this.maxMicroBatchIntervalInMs,
        this.maxMicroBatchIntervalInMs,
        TimeUnit.MILLISECONDS);
    logger.debug("Instantiated BulkExecutor, Context: {}",
        this.operationContextText);
}
// Idempotent dispose: completes the sinks when nothing is pending, otherwise
// just shuts down (outstanding responses still decrement via the pipeline).
@Override
public void dispose() {
    if (this.isDisposed.compareAndSet(false, true)) {
        long totalCountSnapshot = totalCount.get();
        if (totalCountSnapshot == 0) {
            completeAllSinks();
        } else {
            this.shutdown();
        }
    }
}
// Reports whether dispose() has been invoked.
@Override
public boolean isDisposed() {
    return this.isDisposed.get();
}
// Cancels the currently scheduled periodic flush task, if any. Failures are
// logged but not propagated; the executor shutdown will clean up regardless.
private void cancelFlushTask() {
    ScheduledFuture<?> scheduledFutureSnapshot = this.scheduledFutureForFlush;
    if (scheduledFutureSnapshot != null) {
        try {
            scheduledFutureSnapshot.cancel(true);
            logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
        }
    }
}
// Idempotent teardown: completes all group sinks, cancels the flush task and
// stops the scheduler thread.
private void shutdown() {
    if (this.isShutdown.compareAndSet(false, true)) {
        logger.debug("Shutting down, Context: {}", this.operationContextText);
        groupSinks.forEach(FluxSink::complete);
        logger.debug("All group sinks completed, Context: {}", this.operationContextText);
        this.cancelFlushTask();
        try {
            logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
            this.executorService.shutdownNow();
            logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
        } catch (Exception e) {
            logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
        }
    }
}
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
return this
.executeCore()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
this.totalCount.get(),
this.operationContextText,
getThreadInfo());
} else {
int itemsLeftSnapshot = this.totalCount.get();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
}
}
this.dispose();
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
return
maxConcurrentCosmosPartitionsMono
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(maxConcurrentCosmosPartitions -> {
logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
maxConcurrentCosmosPartitions,
this.operationContextText);
return this.inputOperations
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.onErrorContinue((throwable, o) ->
logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
o,
throwable.getMessage(),
this.operationContextText))
.doOnNext((CosmosItemOperation cosmosItemOperation) -> {
BulkExecutorUtil.setRetryPolicyForBulk(
docClientWrapper,
this.container,
cosmosItemOperation,
this.throttlingRetryOptions);
if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
totalCount.incrementAndGet();
}
logger.trace(
"SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
getItemOperationDiagnostics(cosmosItemOperation),
totalCount.get(),
this.operationContextText,
getThreadInfo()
);
})
.doOnComplete(() -> {
mainSourceCompleted.set(true);
long totalCountSnapshot = totalCount.get();
logger.debug("Main source completed -
totalCountSnapshot,
this.operationContextText);
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.cancelFlushTask();
this.onFlush();
long flushIntervalAfterDrainingIncomingFlux = Math.min(
this.maxMicroBatchIntervalInMs,
BatchRequestResponseConstants
.DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
flushIntervalAfterDrainingIncomingFlux,
flushIntervalAfterDrainingIncomingFlux,
TimeUnit.MILLISECONDS);
logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
}
})
.mergeWith(mainSink.asFlux())
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap(
operation -> {
logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
getItemOperationDiagnostics(operation),
this.operationContextText,
getThreadInfo());
return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
.map((String pkRangeId) -> {
PartitionScopeThresholds partitionScopeThresholds =
this.partitionScopeThresholds.computeIfAbsent(
pkRangeId,
(newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
getItemOperationDiagnostics(operation),
pkRangeId,
this.operationContextText,
getThreadInfo());
return Pair.of(partitionScopeThresholds, operation);
});
})
.groupBy(Pair::getKey, Pair::getValue)
.flatMap(
this::executePartitionedGroup,
maxConcurrentCosmosPartitions)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.doOnNext(requestAndResponse -> {
int totalCountAfterDecrement = totalCount.decrementAndGet();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
getItemOperationDiagnostics(requestAndResponse.getOperation()),
totalCountAfterDecrement,
this.operationContextText,
getThreadInfo());
completeAllSinks();
} else {
if (totalCountAfterDecrement == 0) {
logger.debug(
"No Work left - but mainSource not yet completed, Context: {} {}",
this.operationContextText,
getThreadInfo());
}
logger.trace(
"Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
totalCountAfterDecrement,
mainSourceCompletedSnapshot,
getItemOperationDiagnostics(requestAndResponse.getOperation()),
this.operationContextText,
getThreadInfo());
}
})
.doOnComplete(() -> {
int totalCountSnapshot = totalCount.get();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
completeAllSinks();
} else {
logger.debug(
"DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
totalCountSnapshot,
mainSourceCompletedSnapshot,
this.operationContextText,
getThreadInfo());
}
});
});
}
// Per-PK-range pipeline: buffers incoming operations into micro batches (cut by
// size, payload bytes, age, or an explicit flush marker) and executes each batch.
// The group sink re-feeds retried/pending operations into this same group.
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
    GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {
    final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();
    final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
        UnicastProcessor.<CosmosItemOperation>create().serialize();
    final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
    groupSinks.add(groupSink);
    // Batch-cut state: timestamp of the oldest buffered record, current batch
    // size and accumulated serialized payload length. Reset on every cut.
    AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
    AtomicLong currentMicroBatchSize = new AtomicLong(0);
    AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);
    return partitionedGroupFluxOfInputOperations
        .mergeWith(groupFluxProcessor)
        .onBackpressureBuffer()
        .timestamp()
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .bufferUntil(timeStampItemOperationTuple -> {
            long timestamp = timeStampItemOperationTuple.getT1();
            CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
            logger.trace(
                "BufferUntil - enqueued {}, {}, Context: {} {}",
                timestamp,
                getItemOperationDiagnostics(itemOperation),
                this.operationContextText,
                getThreadInfo());
            if (itemOperation == FlushBuffersItemOperation.singleton()) {
                // Flush marker: cut the buffer only if it actually holds operations.
                long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
                if (currentMicroBatchSizeSnapshot > 0) {
                    logger.trace(
                        "Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        currentMicroBatchSizeSnapshot,
                        this.operationContextText,
                        getThreadInfo());
                    firstRecordTimeStamp.set(-1);
                    currentMicroBatchSize.set(0);
                    currentTotalSerializedLength.set(0);
                    return true;
                }
                return false;
            }
            // Remember the first record's timestamp so batch age can be computed.
            firstRecordTimeStamp.compareAndSet(-1, timestamp);
            long age = timestamp - firstRecordTimeStamp.get();
            long batchSize = currentMicroBatchSize.incrementAndGet();
            int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);
            // Cut when the adaptive size target, max age, or request-body byte limit is hit.
            if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
                age >= this.maxMicroBatchIntervalInMs ||
                totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
                logger.debug(
                    "BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
                        "Triggering {}, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    batchSize,
                    totalSerializedLength,
                    age,
                    getItemOperationDiagnostics(itemOperation),
                    this.operationContextText,
                    getThreadInfo());
                firstRecordTimeStamp.set(-1);
                currentMicroBatchSize.set(0);
                currentTotalSerializedLength.set(0);
                return true;
            }
            return false;
        })
        .flatMap(
            (List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
                // Strip flush markers; only real operations go into the batch request.
                List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
                for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
                    timeStampAndItemOperationTuples) {
                    CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
                    if (itemOperation == FlushBuffersItemOperation.singleton()) {
                        continue;
                    }
                    operations.add(itemOperation);
                }
                logger.debug(
                    "Flushing PKRange {} micro batch with {} operations, Context: {} {}",
                    thresholds.getPartitionKeyRangeId(),
                    operations.size(),
                    this.operationContextText,
                    getThreadInfo());
                return executeOperations(operations, thresholds, groupSink);
            },
            ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
                .getCosmosBulkExecutionOptionsAccessor()
                .getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
/**
 * Adds the operation's serialized payload length to the running total and
 * returns the updated value; operations that are not CosmosItemOperationBase
 * contribute nothing and the current total is returned unchanged.
 */
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
    if (!(item instanceof CosmosItemOperationBase)) {
        return currentTotalSerializedLength.get();
    }
    int serializedLength = ((CosmosItemOperationBase) item).getSerializedLength();
    return currentTotalSerializedLength.accumulateAndGet(serializedLength, Integer::sum);
}
// Builds a server batch request for the micro batch; operations that did not fit
// into this request are re-enqueued on the group sink, the rest is executed.
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
    List<CosmosItemOperation> operations,
    PartitionScopeThresholds thresholds,
    FluxSink<CosmosItemOperation> groupSink) {
    if (operations.size() == 0) {
        logger.trace("Empty operations list, Context: {}", this.operationContextText);
        return Flux.empty();
    }
    String pkRange = thresholds.getPartitionKeyRangeId();
    ServerOperationBatchRequest serverOperationBatchRequest =
        BulkExecutorUtil.createBatchRequest(operations, pkRange);
    if (serverOperationBatchRequest.getBatchPendingOperations().size() > 0) {
        // Overflow operations go back into this group for the next micro batch.
        serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
    }
    return Flux.just(serverOperationBatchRequest.getBatchRequest())
        .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
            this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
// Executes one PK-range batch request and fans the per-operation results out;
// on a request-level exception every operation in the batch is routed through
// the exception handler (which may retry).
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
    PartitionKeyRangeServerBatchRequest serverRequest,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    return this.executeBatchRequest(serverRequest)
        .subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
        .flatMapMany(response ->
            Flux
                .fromIterable(response.getResults())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosBatchOperationResult result) ->
                    handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
        .onErrorResume((Throwable throwable) -> {
            // Only Exceptions are handled per-operation; Errors propagate.
            if (!(throwable instanceof Exception)) {
                throw Exceptions.propagate(throwable);
            }
            Exception exception = (Exception) throwable;
            return Flux
                .fromIterable(serverRequest.getOperations())
                .publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
                .flatMap((CosmosItemOperation itemOperation) ->
                    handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
        });
}
// Converts one per-operation batch result into a bulk response, consulting the
// operation's retry policy on failure. Retryable results are re-enqueued (empty
// Mono); terminal failures are emitted to the caller.
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
    CosmosBatchResponse response,
    CosmosBatchOperationResult operationResult,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
        .createCosmosBulkItemResponse(operationResult, response);
    CosmosItemOperation itemOperation = operationResult.getOperation();
    TContext actualContext = this.getActualContext(itemOperation);
    logger.debug(
        "HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
            "Operation Status Code, {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        response.getStatusCode(),
        operationResult.getStatusCode(),
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());
    if (!operationResult.isSuccessStatusCode()) {
        if (itemOperation instanceof ItemBulkOperation<?, ?>) {
            ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
            return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
                result -> {
                    if (result.shouldRetry) {
                        logger.debug(
                            "HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
                                "Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
                            thresholds.getPartitionKeyRangeId(),
                            response.getStatusCode(),
                            operationResult.getStatusCode(),
                            getItemOperationDiagnostics(itemOperation),
                            this.operationContextText,
                            getThreadInfo());
                        return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
                    } else {
                        // Conflict/precondition failures are expected application-level
                        // outcomes - log at debug; everything else is an error.
                        if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
                            response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
                            logger.debug(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        } else {
                            logger.error(
                                "HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
                                    "Code {}, Operation Status Code {}, {}, Context: {} {}",
                                thresholds.getPartitionKeyRangeId(),
                                response.getStatusCode(),
                                operationResult.getStatusCode(),
                                getItemOperationDiagnostics(itemOperation),
                                this.operationContextText,
                                getThreadInfo());
                        }
                        return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                            itemOperation, cosmosBulkItemResponse, actualContext));
                    }
                });
        } else {
            throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
        }
    }
    // Success feeds the adaptive batch-size thresholds.
    thresholds.recordSuccessfulOperation();
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
        itemOperation,
        cosmosBulkItemResponse,
        actualContext));
}
/**
 * Returns the caller-supplied context attached to the operation, falling back
 * to the batch-scoped context when the operation carries none (or is not an
 * ItemBulkOperation at all).
 */
private TContext getActualContext(CosmosItemOperation itemOperation) {
    if (!(itemOperation instanceof ItemBulkOperation<?, ?>)) {
        return this.batchContext;
    }
    ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
    TContext operationContext = itemBulkOperation.getContext();
    return operationContext != null ? operationContext : this.batchContext;
}
// Handles a request-level exception for a single operation: gone/split errors go
// back through the main sink (PK range must be re-resolved), other Cosmos errors
// are delegated to the operation's retry policy, everything else is emitted as a
// failed bulk response.
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    PartitionScopeThresholds thresholds) {
    logger.debug(
        "HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
        thresholds.getPartitionKeyRangeId(),
        exception,
        getItemOperationDiagnostics(itemOperation),
        this.operationContextText,
        getThreadInfo());
    if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
        CosmosException cosmosException = (CosmosException) exception;
        ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
        return itemBulkOperation.getRetryPolicy()
            .shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
            .flatMap(shouldRetryGone -> {
                if (shouldRetryGone) {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    // Re-enter via the main sink so the PK range gets re-resolved.
                    mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
                    return Mono.empty();
                } else {
                    logger.debug(
                        "HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
                            "{}, {}, Context: {} {}",
                        thresholds.getPartitionKeyRangeId(),
                        exception,
                        getItemOperationDiagnostics(itemOperation),
                        this.operationContextText,
                        getThreadInfo());
                    return retryOtherExceptions(
                        itemOperation,
                        exception,
                        groupSink,
                        cosmosException,
                        itemBulkOperation,
                        thresholds);
                }
            });
    }
    TContext actualContext = this.getActualContext(itemOperation);
    return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
/**
 * Re-queues an operation on its group sink after recording the retry on the
 * thresholds, optionally delaying by the supplied back-off. Always resolves to
 * an empty Mono - no response is emitted for a retried operation.
 */
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
    Duration backOffTime,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosItemOperation itemOperation,
    PartitionScopeThresholds thresholds) {
    thresholds.recordEnqueuedRetry();
    if (backOffTime == null || backOffTime.isZero()) {
        // No back-off requested - re-enqueue immediately.
        groupSink.next(itemOperation);
        return Mono.empty();
    }
    return Mono
        .delay(backOffTime)
        .flatMap((delayTick) -> {
            groupSink.next(itemOperation);
            return Mono.empty();
        });
}
// Applies the operation's general retry policy to a non-gone Cosmos exception:
// either re-enqueues the operation on the group sink or emits a failed response.
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
    CosmosItemOperation itemOperation,
    Exception exception,
    FluxSink<CosmosItemOperation> groupSink,
    CosmosException cosmosException,
    ItemBulkOperation<?, ?> itemBulkOperation,
    PartitionScopeThresholds thresholds) {
    TContext actualContext = this.getActualContext(itemOperation);
    return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
        if (result.shouldRetry) {
            return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
        } else {
            return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
                itemOperation, exception, actualContext));
        }
    });
}
// Executes one server batch request (a single partition-key-range micro batch)
// and returns the trace-instrumented response publisher.
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
    RequestOptions options = new RequestOptions();
    // Propagate the throughput-control group configured on the bulk options.
    options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());
    Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
        .getCosmosBulkExecutionOptionsAccessor()
        .getCustomOptions(cosmosBulkExecutionOptions);
    if (customOptions != null && !customOptions.isEmpty()) {
        // Custom options are forwarded verbatim as request headers.
        for(Map.Entry<String, String> entry : customOptions.entrySet()) {
            options.setHeader(entry.getKey(), entry.getValue());
        }
    }
    options.setOperationContextAndListenerTuple(operationListener);
    // Even when content-on-write is disabled client-wide, the batch must request
    // response content if it contains a READ (which needs the payload) or any
    // operation whose per-operation options explicitly enable it.
    if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
        serverRequest.getOperations().size() > 0) {
        for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
            if (itemOperation instanceof ItemBulkOperation<?, ?>) {
                ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
                if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
                    (itemBulkOperation.getRequestOptions() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
                        itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {
                    options.setContentResponseOnWriteEnabled(true);
                    // One qualifying operation is enough; the flag applies batch-wide.
                    break;
                }
            }
        }
    }
    // withContext captures the reactor context so the tracer can attach its span.
    return withContext(context -> {
        final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
            BridgeInternal.getLink(this.container), serverRequest, options, false);
        return BridgeInternal.getTracerProvider(this.cosmosClient)
            .traceEnabledBatchResponsePublisher(
                responseMono,
                context,
                this.bulkSpanName,
                this.container.getId(),
                this.container.getDatabase().getId(),
                this.cosmosClient,
                options.getConsistencyLevel(),
                OperationType.Batch,
                ResourceType.Document);
    });
}
// Pushes a flush marker into every group sink so partially filled micro batches
// are dispatched. Failures are logged and swallowed - this runs on a timer and
// must never kill the schedule.
private void onFlush() {
    try {
        this.groupSinks.forEach(sink -> sink.next(FlushBuffersItemOperation.singleton()));
    } catch(Throwable t) {
        logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
    }
}
// Renders a compact diagnostic string for an operation (type, PK, id); the
// flush marker gets a dedicated short form.
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
    if (operation == FlushBuffersItemOperation.singleton()) {
        return "ItemOperation[Type: Flush]";
    }
    StringBuilder sb = new StringBuilder();
    sb
        .append("ItemOperation[Type: ")
        .append(operation.getOperationType().toString())
        .append(", PK: ")
        .append(operation.getPartitionKeyValue() != null ? operation.getPartitionKeyValue().toString() : "n/a")
        .append(", id: ")
        .append(operation.getId())
        .append("]");
    return sb.toString();
}
// Renders diagnostic information about the calling thread (name, group, daemon
// flag, id) for inclusion in log statements.
private static String getThreadInfo() {
    StringBuilder sb = new StringBuilder();
    Thread t = Thread.currentThread();
    sb
        .append("Thread[")
        .append("Name: ")
        .append(t.getName())
        .append(",Group: ")
        .append(t.getThreadGroup() != null ? t.getThreadGroup().getName() : "n/a")
        .append(", isDaemon: ")
        .append(t.isDaemon())
        .append(", Id: ")
        .append(t.getId())
        .append("]");
    return sb.toString();
}
// Emit-failure handler for the serialized main sink: retries (returns true) only
// on FAIL_NON_SERIALIZED, which indicates a concurrent emission attempt and is
// safe to retry; every other failure is logged as an error and given up on.
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {
    @Override
    public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
        if (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {
            logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
            // true -> the sink retries the emission.
            return true;
        }
        logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
        return false;
    }
}
} | class BulkExecutor<TContext> implements Disposable {
private final static Logger logger = LoggerFactory.getLogger(BulkExecutor.class);
private final static AtomicLong instanceCount = new AtomicLong(0);
private final CosmosAsyncContainer container;
private final AsyncDocumentClient docClientWrapper;
private final String operationContextText;
private final OperationContextAndListenerTuple operationListener;
private final ThrottlingRetryOptions throttlingRetryOptions;
private final Flux<com.azure.cosmos.models.CosmosItemOperation> inputOperations;
private final Long maxMicroBatchIntervalInMs;
private final TContext batchContext;
private final ConcurrentMap<String, PartitionScopeThresholds> partitionScopeThresholds;
private final CosmosBulkExecutionOptions cosmosBulkExecutionOptions;
private final AtomicBoolean mainSourceCompleted;
private final AtomicBoolean isDisposed = new AtomicBoolean(false);
private final AtomicBoolean isShutdown = new AtomicBoolean(false);
private final AtomicInteger totalCount;
private final Sinks.EmitFailureHandler serializedEmitFailureHandler;
private final Sinks.Many<CosmosItemOperation> mainSink;
private final List<FluxSink<CosmosItemOperation>> groupSinks;
private final ScheduledThreadPoolExecutor executorService;
private final CosmosAsyncClient cosmosClient;
private final String bulkSpanName;
private ScheduledFuture<?> scheduledFutureForFlush;
private final String identifier = "BulkExecutor-" + instanceCount.incrementAndGet();
public BulkExecutor(CosmosAsyncContainer container,
Flux<CosmosItemOperation> inputOperations,
CosmosBulkExecutionOptions cosmosBulkOptions) {
checkNotNull(container, "expected non-null container");
checkNotNull(inputOperations, "expected non-null inputOperations");
checkNotNull(cosmosBulkOptions, "expected non-null bulkOptions");
this.cosmosBulkExecutionOptions = cosmosBulkOptions;
this.container = container;
this.bulkSpanName = "nonTransactionalBatch." + this.container.getId();
this.inputOperations = inputOperations;
this.docClientWrapper = CosmosBridgeInternal.getAsyncDocumentClient(container.getDatabase());
this.cosmosClient = ImplementationBridgeHelpers
.CosmosAsyncDatabaseHelper
.getCosmosAsyncDatabaseAccessor()
.getCosmosAsyncClient(container.getDatabase());
this.throttlingRetryOptions = docClientWrapper.getConnectionPolicy().getThrottlingRetryOptions();
maxMicroBatchIntervalInMs = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxMicroBatchInterval(cosmosBulkExecutionOptions)
.toMillis();
batchContext = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getLegacyBatchScopedContext(cosmosBulkExecutionOptions);
this.partitionScopeThresholds = ImplementationBridgeHelpers.CosmosBulkExecutionThresholdsStateHelper
.getBulkExecutionThresholdsAccessor()
.getPartitionScopeThresholds(cosmosBulkExecutionOptions.getThresholdsState());
operationListener = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getOperationContext(cosmosBulkExecutionOptions);
if (operationListener != null &&
operationListener.getOperationContext() != null) {
operationContextText = identifier + "[" + operationListener.getOperationContext().toString() + "]";
} else {
operationContextText = identifier +"[n/a]";
}
mainSourceCompleted = new AtomicBoolean(false);
totalCount = new AtomicInteger(0);
serializedEmitFailureHandler = new SerializedEmitFailureHandler();
mainSink = Sinks.many().unicast().onBackpressureBuffer();
groupSinks = new CopyOnWriteArrayList<>();
this.executorService = new ScheduledThreadPoolExecutor(
1,
new CosmosDaemonThreadFactory(identifier));
this.executorService.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
this.executorService.setRemoveOnCancelPolicy(true);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
this.maxMicroBatchIntervalInMs,
this.maxMicroBatchIntervalInMs,
TimeUnit.MILLISECONDS);
logger.debug("Instantiated BulkExecutor, Context: {}",
this.operationContextText);
}
@Override
public void dispose() {
if (this.isDisposed.compareAndSet(false, true)) {
long totalCountSnapshot = totalCount.get();
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.shutdown();
}
}
}
@Override
public boolean isDisposed() {
return this.isDisposed.get();
}
private void cancelFlushTask() {
ScheduledFuture<?> scheduledFutureSnapshot = this.scheduledFutureForFlush;
if (scheduledFutureSnapshot != null) {
try {
scheduledFutureSnapshot.cancel(true);
logger.debug("Cancelled all future scheduled tasks {}, Context: {}", getThreadInfo(), this.operationContextText);
} catch (Exception e) {
logger.warn("Failed to cancel scheduled tasks{}, Context: {}", getThreadInfo(), this.operationContextText, e);
}
}
}
private void shutdown() {
if (this.isShutdown.compareAndSet(false, true)) {
logger.debug("Shutting down, Context: {}", this.operationContextText);
groupSinks.forEach(FluxSink::complete);
logger.debug("All group sinks completed, Context: {}", this.operationContextText);
this.cancelFlushTask();
try {
logger.debug("Shutting down the executor service, Context: {}", this.operationContextText);
this.executorService.shutdownNow();
logger.debug("Successfully shut down the executor service, Context: {}", this.operationContextText);
} catch (Exception e) {
logger.warn("Failed to shut down the executor service, Context: {}", this.operationContextText, e);
}
}
}
public Flux<CosmosBulkOperationResponse<TContext>> execute() {
return this
.executeCore()
.doFinally((SignalType signal) -> {
if (signal == SignalType.ON_COMPLETE) {
logger.debug("BulkExecutor.execute flux completed -
this.totalCount.get(),
this.operationContextText,
getThreadInfo());
} else {
int itemsLeftSnapshot = this.totalCount.get();
if (itemsLeftSnapshot > 0) {
logger.info("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
} else {
logger.debug("BulkExecutor.execute flux terminated - Signal: {} -
signal,
itemsLeftSnapshot,
this.operationContextText,
getThreadInfo());
}
}
this.dispose();
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executeCore() {
Integer nullableMaxConcurrentCosmosPartitions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxConcurrentCosmosPartitions(cosmosBulkExecutionOptions);
Mono<Integer> maxConcurrentCosmosPartitionsMono = nullableMaxConcurrentCosmosPartitions != null ?
Mono.just(Math.max(256, nullableMaxConcurrentCosmosPartitions)) :
this.container.getFeedRanges().map(ranges -> Math.max(256, ranges.size() * 2));
return
maxConcurrentCosmosPartitionsMono
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(maxConcurrentCosmosPartitions -> {
logger.debug("BulkExecutor.execute with MaxConcurrentPartitions: {}, Context: {}",
maxConcurrentCosmosPartitions,
this.operationContextText);
return this.inputOperations
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.onErrorContinue((throwable, o) ->
logger.error("Skipping an error operation while processing {}. Cause: {}, Context: {}",
o,
throwable.getMessage(),
this.operationContextText))
.doOnNext((CosmosItemOperation cosmosItemOperation) -> {
BulkExecutorUtil.setRetryPolicyForBulk(
docClientWrapper,
this.container,
cosmosItemOperation,
this.throttlingRetryOptions);
if (cosmosItemOperation != FlushBuffersItemOperation.singleton()) {
totalCount.incrementAndGet();
}
logger.trace(
"SetupRetryPolicy, {}, TotalCount: {}, Context: {}, {}",
getItemOperationDiagnostics(cosmosItemOperation),
totalCount.get(),
this.operationContextText,
getThreadInfo()
);
})
.doOnComplete(() -> {
mainSourceCompleted.set(true);
long totalCountSnapshot = totalCount.get();
logger.debug("Main source completed -
totalCountSnapshot,
this.operationContextText);
if (totalCountSnapshot == 0) {
completeAllSinks();
} else {
this.cancelFlushTask();
this.onFlush();
long flushIntervalAfterDrainingIncomingFlux = Math.min(
this.maxMicroBatchIntervalInMs,
BatchRequestResponseConstants
.DEFAULT_MAX_MICRO_BATCH_INTERVAL_AFTER_DRAINING_INCOMING_FLUX_IN_MILLISECONDS);
this.scheduledFutureForFlush = this.executorService.scheduleWithFixedDelay(
this::onFlush,
flushIntervalAfterDrainingIncomingFlux,
flushIntervalAfterDrainingIncomingFlux,
TimeUnit.MILLISECONDS);
logger.debug("Scheduled new flush operation {}, Context: {}", getThreadInfo(), this.operationContextText);
}
})
.mergeWith(mainSink.asFlux())
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap(
operation -> {
logger.trace("Before Resolve PkRangeId, {}, Context: {} {}",
getItemOperationDiagnostics(operation),
this.operationContextText,
getThreadInfo());
return BulkExecutorUtil.resolvePartitionKeyRangeId(this.docClientWrapper, this.container, operation)
.map((String pkRangeId) -> {
PartitionScopeThresholds partitionScopeThresholds =
this.partitionScopeThresholds.computeIfAbsent(
pkRangeId,
(newPkRangeId) -> new PartitionScopeThresholds(newPkRangeId, this.cosmosBulkExecutionOptions));
logger.trace("Resolved PkRangeId, {}, PKRangeId: {} Context: {} {}",
getItemOperationDiagnostics(operation),
pkRangeId,
this.operationContextText,
getThreadInfo());
return Pair.of(partitionScopeThresholds, operation);
});
})
.groupBy(Pair::getKey, Pair::getValue)
.flatMap(
this::executePartitionedGroup,
maxConcurrentCosmosPartitions)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.doOnNext(requestAndResponse -> {
int totalCountAfterDecrement = totalCount.decrementAndGet();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountAfterDecrement == 0 && mainSourceCompletedSnapshot) {
logger.debug("All work completed, {}, TotalCount: {}, Context: {} {}",
getItemOperationDiagnostics(requestAndResponse.getOperation()),
totalCountAfterDecrement,
this.operationContextText,
getThreadInfo());
completeAllSinks();
} else {
if (totalCountAfterDecrement == 0) {
logger.debug(
"No Work left - but mainSource not yet completed, Context: {} {}",
this.operationContextText,
getThreadInfo());
}
logger.trace(
"Work left - TotalCount after decrement: {}, main sink completed {}, {}, Context: {} {}",
totalCountAfterDecrement,
mainSourceCompletedSnapshot,
getItemOperationDiagnostics(requestAndResponse.getOperation()),
this.operationContextText,
getThreadInfo());
}
})
.doOnComplete(() -> {
int totalCountSnapshot = totalCount.get();
boolean mainSourceCompletedSnapshot = mainSourceCompleted.get();
if (totalCountSnapshot == 0 && mainSourceCompletedSnapshot) {
logger.debug("DoOnComplete: All work completed, Context: {}", this.operationContextText);
completeAllSinks();
} else {
logger.debug(
"DoOnComplete: Work left - TotalCount after decrement: {}, main sink completed {}, Context: {} {}",
totalCountSnapshot,
mainSourceCompletedSnapshot,
this.operationContextText,
getThreadInfo());
}
});
});
}
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionedGroup(
GroupedFlux<PartitionScopeThresholds, CosmosItemOperation> partitionedGroupFluxOfInputOperations) {
final PartitionScopeThresholds thresholds = partitionedGroupFluxOfInputOperations.key();
final FluxProcessor<CosmosItemOperation, CosmosItemOperation> groupFluxProcessor =
UnicastProcessor.<CosmosItemOperation>create().serialize();
final FluxSink<CosmosItemOperation> groupSink = groupFluxProcessor.sink(FluxSink.OverflowStrategy.BUFFER);
groupSinks.add(groupSink);
AtomicLong firstRecordTimeStamp = new AtomicLong(-1);
AtomicLong currentMicroBatchSize = new AtomicLong(0);
AtomicInteger currentTotalSerializedLength = new AtomicInteger(0);
return partitionedGroupFluxOfInputOperations
.mergeWith(groupFluxProcessor)
.onBackpressureBuffer()
.timestamp()
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.bufferUntil(timeStampItemOperationTuple -> {
long timestamp = timeStampItemOperationTuple.getT1();
CosmosItemOperation itemOperation = timeStampItemOperationTuple.getT2();
logger.trace(
"BufferUntil - enqueued {}, {}, Context: {} {}",
timestamp,
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
if (itemOperation == FlushBuffersItemOperation.singleton()) {
long currentMicroBatchSizeSnapshot = currentMicroBatchSize.get();
if (currentMicroBatchSizeSnapshot > 0) {
logger.trace(
"Flushing PKRange {} (batch size: {}) due to FlushItemOperation, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
currentMicroBatchSizeSnapshot,
this.operationContextText,
getThreadInfo());
firstRecordTimeStamp.set(-1);
currentMicroBatchSize.set(0);
currentTotalSerializedLength.set(0);
return true;
}
return false;
}
firstRecordTimeStamp.compareAndSet(-1, timestamp);
long age = timestamp - firstRecordTimeStamp.get();
long batchSize = currentMicroBatchSize.incrementAndGet();
int totalSerializedLength = this.calculateTotalSerializedLength(currentTotalSerializedLength, itemOperation);
if (batchSize >= thresholds.getTargetMicroBatchSizeSnapshot() ||
age >= this.maxMicroBatchIntervalInMs ||
totalSerializedLength >= BatchRequestResponseConstants.MAX_DIRECT_MODE_BATCH_REQUEST_BODY_SIZE_IN_BYTES) {
logger.debug(
"BufferUntil - Flushing PKRange {} due to BatchSize ({}), payload size ({}) or age ({}), " +
"Triggering {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
batchSize,
totalSerializedLength,
age,
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
firstRecordTimeStamp.set(-1);
currentMicroBatchSize.set(0);
currentTotalSerializedLength.set(0);
return true;
}
return false;
})
.flatMap(
(List<Tuple2<Long, CosmosItemOperation>> timeStampAndItemOperationTuples) -> {
List<CosmosItemOperation> operations = new ArrayList<>(timeStampAndItemOperationTuples.size());
for (Tuple2<Long, CosmosItemOperation> timeStampAndItemOperationTuple :
timeStampAndItemOperationTuples) {
CosmosItemOperation itemOperation = timeStampAndItemOperationTuple.getT2();
if (itemOperation == FlushBuffersItemOperation.singleton()) {
continue;
}
operations.add(itemOperation);
}
logger.debug(
"Flushing PKRange {} micro batch with {} operations, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
operations.size(),
this.operationContextText,
getThreadInfo());
return executeOperations(operations, thresholds, groupSink);
},
ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getMaxMicroBatchConcurrency(this.cosmosBulkExecutionOptions));
}
private int calculateTotalSerializedLength(AtomicInteger currentTotalSerializedLength, CosmosItemOperation item) {
if (item instanceof CosmosItemOperationBase) {
return currentTotalSerializedLength.accumulateAndGet(
((CosmosItemOperationBase) item).getSerializedLength(),
(currentValue, incremental) -> currentValue + incremental);
}
return currentTotalSerializedLength.get();
}
private Flux<CosmosBulkOperationResponse<TContext>> executeOperations(
List<CosmosItemOperation> operations,
PartitionScopeThresholds thresholds,
FluxSink<CosmosItemOperation> groupSink) {
if (operations.size() == 0) {
logger.trace("Empty operations list, Context: {}", this.operationContextText);
return Flux.empty();
}
String pkRange = thresholds.getPartitionKeyRangeId();
ServerOperationBatchRequest serverOperationBatchRequest =
BulkExecutorUtil.createBatchRequest(operations, pkRange);
if (serverOperationBatchRequest.getBatchPendingOperations().size() > 0) {
serverOperationBatchRequest.getBatchPendingOperations().forEach(groupSink::next);
}
return Flux.just(serverOperationBatchRequest.getBatchRequest())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((PartitionKeyRangeServerBatchRequest serverRequest) ->
this.executePartitionKeyRangeServerBatchRequest(serverRequest, groupSink, thresholds));
}
private Flux<CosmosBulkOperationResponse<TContext>> executePartitionKeyRangeServerBatchRequest(
PartitionKeyRangeServerBatchRequest serverRequest,
FluxSink<CosmosItemOperation> groupSink,
PartitionScopeThresholds thresholds) {
return this.executeBatchRequest(serverRequest)
.subscribeOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMapMany(response ->
Flux
.fromIterable(response.getResults())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((CosmosBatchOperationResult result) ->
handleTransactionalBatchOperationResult(response, result, groupSink, thresholds)))
.onErrorResume((Throwable throwable) -> {
if (!(throwable instanceof Exception)) {
throw Exceptions.propagate(throwable);
}
Exception exception = (Exception) throwable;
return Flux
.fromIterable(serverRequest.getOperations())
.publishOn(CosmosSchedulers.BULK_EXECUTOR_BOUNDED_ELASTIC)
.flatMap((CosmosItemOperation itemOperation) ->
handleTransactionalBatchExecutionException(itemOperation, exception, groupSink, thresholds));
});
}
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchOperationResult(
CosmosBatchResponse response,
CosmosBatchOperationResult operationResult,
FluxSink<CosmosItemOperation> groupSink,
PartitionScopeThresholds thresholds) {
CosmosBulkItemResponse cosmosBulkItemResponse = ModelBridgeInternal
.createCosmosBulkItemResponse(operationResult, response);
CosmosItemOperation itemOperation = operationResult.getOperation();
TContext actualContext = this.getActualContext(itemOperation);
logger.debug(
"HandleTransactionalBatchOperationResult - PKRange {}, Response Status Code {}, " +
"Operation Status Code, {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
if (!operationResult.isSuccessStatusCode()) {
if (itemOperation instanceof ItemBulkOperation<?, ?>) {
ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
return itemBulkOperation.getRetryPolicy().shouldRetry(operationResult).flatMap(
result -> {
if (result.shouldRetry) {
logger.debug(
"HandleTransactionalBatchOperationResult - enqueue retry, PKRange {}, Response " +
"Status Code {}, Operation Status Code, {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
return this.enqueueForRetry(result.backOffTime, groupSink, itemOperation, thresholds);
} else {
if (response.getStatusCode() == HttpConstants.StatusCodes.CONFLICT ||
response.getStatusCode() == HttpConstants.StatusCodes.PRECONDITION_FAILED) {
logger.debug(
"HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
"Code {}, Operation Status Code {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
} else {
logger.error(
"HandleTransactionalBatchOperationResult - Fail, PKRange {}, Response Status " +
"Code {}, Operation Status Code {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
response.getStatusCode(),
operationResult.getStatusCode(),
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
}
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
itemOperation, cosmosBulkItemResponse, actualContext));
}
});
} else {
throw new UnsupportedOperationException("Unknown CosmosItemOperation.");
}
}
thresholds.recordSuccessfulOperation();
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
itemOperation,
cosmosBulkItemResponse,
actualContext));
}
private TContext getActualContext(CosmosItemOperation itemOperation) {
ItemBulkOperation<?, ?> itemBulkOperation = null;
if (itemOperation instanceof ItemBulkOperation<?, ?>) {
itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
}
if (itemBulkOperation == null) {
return this.batchContext;
}
TContext operationContext = itemBulkOperation.getContext();
if (operationContext != null) {
return operationContext;
}
return this.batchContext;
}
private Mono<CosmosBulkOperationResponse<TContext>> handleTransactionalBatchExecutionException(
CosmosItemOperation itemOperation,
Exception exception,
FluxSink<CosmosItemOperation> groupSink,
PartitionScopeThresholds thresholds) {
logger.debug(
"HandleTransactionalBatchExecutionException, PKRange {}, Error: {}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
exception,
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
if (exception instanceof CosmosException && itemOperation instanceof ItemBulkOperation<?, ?>) {
CosmosException cosmosException = (CosmosException) exception;
ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
return itemBulkOperation.getRetryPolicy()
.shouldRetryForGone(cosmosException.getStatusCode(), cosmosException.getSubStatusCode())
.flatMap(shouldRetryGone -> {
if (shouldRetryGone) {
logger.debug(
"HandleTransactionalBatchExecutionException - Retry due to split, PKRange {}, Error: " +
"{}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
exception,
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
mainSink.emitNext(itemOperation, serializedEmitFailureHandler);
return Mono.empty();
} else {
logger.debug(
"HandleTransactionalBatchExecutionException - Retry other, PKRange {}, Error: " +
"{}, {}, Context: {} {}",
thresholds.getPartitionKeyRangeId(),
exception,
getItemOperationDiagnostics(itemOperation),
this.operationContextText,
getThreadInfo());
return retryOtherExceptions(
itemOperation,
exception,
groupSink,
cosmosException,
itemBulkOperation,
thresholds);
}
});
}
TContext actualContext = this.getActualContext(itemOperation);
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(itemOperation, exception, actualContext));
}
private Mono<CosmosBulkOperationResponse<TContext>> enqueueForRetry(
Duration backOffTime,
FluxSink<CosmosItemOperation> groupSink,
CosmosItemOperation itemOperation,
PartitionScopeThresholds thresholds) {
thresholds.recordEnqueuedRetry();
if (backOffTime == null || backOffTime.isZero()) {
groupSink.next(itemOperation);
return Mono.empty();
} else {
return Mono
.delay(backOffTime)
.flatMap((dummy) -> {
groupSink.next(itemOperation);
return Mono.empty();
});
}
}
private Mono<CosmosBulkOperationResponse<TContext>> retryOtherExceptions(
CosmosItemOperation itemOperation,
Exception exception,
FluxSink<CosmosItemOperation> groupSink,
CosmosException cosmosException,
ItemBulkOperation<?, ?> itemBulkOperation,
PartitionScopeThresholds thresholds) {
TContext actualContext = this.getActualContext(itemOperation);
return itemBulkOperation.getRetryPolicy().shouldRetry(cosmosException).flatMap(result -> {
if (result.shouldRetry) {
return this.enqueueForRetry(result.backOffTime, groupSink, itemBulkOperation, thresholds);
} else {
return Mono.just(ModelBridgeInternal.createCosmosBulkOperationResponse(
itemOperation, exception, actualContext));
}
});
}
private Mono<CosmosBatchResponse> executeBatchRequest(PartitionKeyRangeServerBatchRequest serverRequest) {
RequestOptions options = new RequestOptions();
options.setThroughputControlGroupName(cosmosBulkExecutionOptions.getThroughputControlGroupName());
Map<String, String> customOptions = ImplementationBridgeHelpers.CosmosBulkExecutionOptionsHelper
.getCosmosBulkExecutionOptionsAccessor()
.getCustomOptions(cosmosBulkExecutionOptions);
if (customOptions != null && !customOptions.isEmpty()) {
for(Map.Entry<String, String> entry : customOptions.entrySet()) {
options.setHeader(entry.getKey(), entry.getValue());
}
}
options.setOperationContextAndListenerTuple(operationListener);
if (!this.docClientWrapper.isContentResponseOnWriteEnabled() &&
serverRequest.getOperations().size() > 0) {
for (CosmosItemOperation itemOperation : serverRequest.getOperations()) {
if (itemOperation instanceof ItemBulkOperation<?, ?>) {
ItemBulkOperation<?, ?> itemBulkOperation = (ItemBulkOperation<?, ?>) itemOperation;
if (itemBulkOperation.getOperationType() == CosmosItemOperationType.READ ||
(itemBulkOperation.getRequestOptions() != null &&
itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled() != null &&
itemBulkOperation.getRequestOptions().isContentResponseOnWriteEnabled().booleanValue())) {
options.setContentResponseOnWriteEnabled(true);
break;
}
}
}
}
return withContext(context -> {
final Mono<CosmosBatchResponse> responseMono = this.docClientWrapper.executeBatchRequest(
BridgeInternal.getLink(this.container), serverRequest, options, false);
return BridgeInternal.getTracerProvider(this.cosmosClient)
.traceEnabledBatchResponsePublisher(
responseMono,
context,
this.bulkSpanName,
this.container.getId(),
this.container.getDatabase().getId(),
this.cosmosClient,
options.getConsistencyLevel(),
OperationType.Batch,
ResourceType.Document);
});
}
private void onFlush() {
try {
this.groupSinks.forEach(sink -> sink.next(FlushBuffersItemOperation.singleton()));
} catch(Throwable t) {
logger.error("Callback invocation 'onFlush' failed. Context: {}", this.operationContextText, t);
}
}
private static String getItemOperationDiagnostics(CosmosItemOperation operation) {
if (operation == FlushBuffersItemOperation.singleton()) {
return "ItemOperation[Type: Flush]";
}
StringBuilder sb = new StringBuilder();
sb
.append("ItemOperation[Type: ")
.append(operation.getOperationType().toString())
.append(", PK: ")
.append(operation.getPartitionKeyValue() != null ? operation.getPartitionKeyValue().toString() : "n/a")
.append(", id: ")
.append(operation.getId())
.append("]");
return sb.toString();
}
private static String getThreadInfo() {
StringBuilder sb = new StringBuilder();
Thread t = Thread.currentThread();
sb
.append("Thread[")
.append("Name: ")
.append(t.getName())
.append(",Group: ")
.append(t.getThreadGroup() != null ? t.getThreadGroup().getName() : "n/a")
.append(", isDaemon: ")
.append(t.isDaemon())
.append(", Id: ")
.append(t.getId())
.append("]");
return sb.toString();
}
private class SerializedEmitFailureHandler implements Sinks.EmitFailureHandler {
@Override
public boolean onEmitFailure(SignalType signalType, Sinks.EmitResult emitResult) {
if (emitResult.equals(Sinks.EmitResult.FAIL_NON_SERIALIZED)) {
logger.debug("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
return true;
}
logger.error("SerializedEmitFailureHandler.onEmitFailure - Signal:{}, Result: {}", signalType, emitResult);
return false;
}
}
} |
you should add the autoconfiguration class here | void withoutObjectMapperShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(ObjectMapper.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThatIllegalStateException());
} | .withClassLoader(new FilteredClassLoader(ObjectMapper.class)) | void withoutObjectMapperShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(ObjectMapper.class))
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThatIllegalStateException());
} | class AzureEventHubsMessagingAutoConfigurationTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(AzureEventHubsMessagingAutoConfiguration.class,
JacksonAutoConfiguration.class));
@Test
void disableEventHubsShouldNotConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.enabled=false",
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubsTemplateShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(EventHubsTemplate.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubConnectionShouldNotConfigure() {
this.contextRunner
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutCheckpointStoreShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void connectionInfoAndCheckpointStoreProvidedShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> {
assertThat(context).hasSingleBean(EventHubsProcessorFactory.class);
assertThat(context).hasSingleBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class);
});
}
@Test
@Test
void withIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"))
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> {
assertNotNull(context.getBean("eventHubsMessageConverter"));
assertThrows(NoSuchBeanDefinitionException.class, () -> context.getBean("eventHubsMessageConverterWithNonIsolatedObjectMapper"));
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
});
}
@Test
void withNonIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
"spring.cloud.azure.message-converter.isolated-object-mapper.enabled=false")
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> {
assertNotNull(context.getBean("eventHubsMessageConverterWithNonIsolatedObjectMapper"));
assertThrows(NoSuchBeanDefinitionException.class, () -> context.getBean("eventHubsMessageConverter"));
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
});
}
} | class AzureEventHubsMessagingAutoConfigurationTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(AzureEventHubsMessagingAutoConfiguration.class));
@Test
void disableEventHubsShouldNotConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.enabled=false",
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubsTemplateShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(EventHubsTemplate.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubConnectionShouldNotConfigure() {
this.contextRunner
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutCheckpointStoreShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void connectionInfoAndCheckpointStoreProvidedShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> {
assertThat(context).hasSingleBean(EventHubsProcessorFactory.class);
assertThat(context).hasSingleBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class);
});
}
@Test
void withIsolatedObjectMapper() {
    // Bug fix: the original had a duplicated @Test annotation, which does not
    // compile (@Test is not @Repeatable). Only one annotation is kept.
    //
    // By default the message converter uses an isolated ObjectMapper, so the
    // "defaultEventHubsMessageConverter" bean is registered and the shared-mapper
    // variant "eventHubsMessageConverter" must be absent.
    this.contextRunner
        .withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"))
        .withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
        .withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
        .run(context -> {
            assertThat(context).hasBean("defaultEventHubsMessageConverter");
            assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
            assertThat(context).doesNotHaveBean("eventHubsMessageConverter");
        });
}
@Test
void withNonIsolatedObjectMapper() {
    // With isolated-object-mapper disabled, the converter built on the shared
    // Jackson ObjectMapper ("eventHubsMessageConverter") replaces the default one.
    this.contextRunner
        .withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
            "spring.cloud.azure.message-converter.isolated-object-mapper=false")
        .withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
        .withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
        .run(ctx -> {
            assertThat(ctx).hasBean("eventHubsMessageConverter");
            assertThat(ctx).hasSingleBean(EventHubsMessageConverter.class);
            assertThat(ctx).doesNotHaveBean("defaultEventHubsMessageConverter");
        });
}
@Test
void withUserProvidedObjectMapper() {
    // A user-supplied ObjectMapper bean should be the only mapper in the context
    // and should back the single EventHubsMessageConverter.
    this.contextRunner
        .withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
            "spring.cloud.azure.message-converter.isolated-object-mapper=false")
        .withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
        .withBean("userObjectMapper", ObjectMapper.class, ObjectMapper::new)
        .withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
        .run(ctx -> {
            assertThat(ctx).hasBean("userObjectMapper");
            assertThat(ctx).hasSingleBean(ObjectMapper.class);
            assertThat(ctx).hasSingleBean(EventHubsMessageConverter.class);
        });
}
} |
can we use this method `assertThat(context).doesNotHaveBean(beanName)`? | void withNonIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
"spring.cloud.azure.message-converter.isolated-object-mapper=false")
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.run(context -> {
assertNotNull(context.getBean("eventHubsMessageConverter"));
assertThrows(NoSuchBeanDefinitionException.class, () -> context.getBean("defaultEventHubsMessageConverter"));
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
});
} | assertThrows(NoSuchBeanDefinitionException.class, () -> context.getBean("defaultEventHubsMessageConverter")); | void withNonIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
"spring.cloud.azure.message-converter.isolated-object-mapper=false")
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.run(context -> {
assertThat(context).hasBean("eventHubsMessageConverter");
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
assertThat(context).doesNotHaveBean("defaultEventHubsMessageConverter");
});
} | class AzureEventHubsMessagingAutoConfigurationTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(AzureEventHubsMessagingAutoConfiguration.class));
@Test
void disableEventHubsShouldNotConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.enabled=false",
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubsTemplateShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(EventHubsTemplate.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubConnectionShouldNotConfigure() {
this.contextRunner
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutCheckpointStoreShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void connectionInfoAndCheckpointStoreProvidedShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> {
assertThat(context).hasSingleBean(EventHubsProcessorFactory.class);
assertThat(context).hasSingleBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class);
});
}
@Test
void withoutObjectMapperShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(ObjectMapper.class))
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThatIllegalStateException());
}
@Test
void withIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"))
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.run(context -> {
assertNotNull(context.getBean("defaultEventHubsMessageConverter"));
assertThrows(NoSuchBeanDefinitionException.class, () -> context.getBean("eventHubsMessageConverter"));
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
});
}
@Test
} | class AzureEventHubsMessagingAutoConfigurationTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(AzureEventHubsMessagingAutoConfiguration.class));
@Test
void disableEventHubsShouldNotConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.enabled=false",
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubsTemplateShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(EventHubsTemplate.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.namespace=test-namespace"
)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutEventHubConnectionShouldNotConfigure() {
this.contextRunner
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void withoutCheckpointStoreShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThat(context).doesNotHaveBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class));
}
@Test
void connectionInfoAndCheckpointStoreProvidedShouldConfigure() {
this.contextRunner
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withBean(CheckpointStore.class, TestCheckpointStore::new)
.run(context -> {
assertThat(context).hasSingleBean(EventHubsProcessorFactory.class);
assertThat(context).hasSingleBean(AzureEventHubsMessagingAutoConfiguration.ProcessorContainerConfiguration.class);
});
}
@Test
void withoutObjectMapperShouldNotConfigure() {
this.contextRunner
.withClassLoader(new FilteredClassLoader(ObjectMapper.class))
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.withPropertyValues(
"spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace")
)
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.run(context -> assertThatIllegalStateException());
}
@Test
void withIsolatedObjectMapper() {
this.contextRunner
.withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"))
.withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
.withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
.run(context -> {
assertThat(context).hasBean("defaultEventHubsMessageConverter");
assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
assertThat(context).doesNotHaveBean("eventHubsMessageConverter");
});
}
@Test
void withUserProvidedObjectMapper() {
    // Bug fix: the original had a duplicated @Test annotation, which does not
    // compile (@Test is not @Repeatable). Only one annotation is kept.
    //
    // A user-supplied ObjectMapper bean should be the only mapper in the context
    // and should back the single EventHubsMessageConverter.
    this.contextRunner
        .withPropertyValues("spring.cloud.azure.eventhubs.connection-string=" + String.format(CONNECTION_STRING_FORMAT, "test-namespace"),
            "spring.cloud.azure.message-converter.isolated-object-mapper=false")
        .withUserConfiguration(AzureEventHubsPropertiesTestConfiguration.class)
        .withBean("userObjectMapper", ObjectMapper.class, ObjectMapper::new)
        .withConfiguration(AutoConfigurations.of(JacksonAutoConfiguration.class))
        .run(context -> {
            assertThat(context).hasBean("userObjectMapper");
            assertThat(context).hasSingleBean(ObjectMapper.class);
            assertThat(context).hasSingleBean(EventHubsMessageConverter.class);
        });
}
} |
do we need to get `patchedPerson`? it is not used anywhere | public void testPatch() {
insertedPerson = cosmosTemplate.patch(insertedPerson, operations, null);
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(insertedPerson.getAge(), PATCH_AGE_1);
} | assertEquals(insertedPerson.getAge(), PATCH_AGE_1); | public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
} | class CosmosTemplateIT {
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
private void insertPerson(Person person) {
cosmosTemplate.insert(person,
new PartitionKey(personInfo.getPartitionKeyFieldValue(person)));
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
public void testPatchMultiOperations() {
    // Bug fixes: the original carried a duplicated @Test annotation (a compile
    // error, since @Test is not @Repeatable) and fetched a local `patchedPerson`
    // via findById that was never used. The unused fetch is removed; the
    // assertions run against the entity returned by patch(), as before.
    //
    // Applies the combined patch (replace firstName/passportIdsByCountry,
    // add a hobby, increment age) and verifies every operation took effect.
    insertedPerson = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
    assertEquals(insertedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(insertedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(insertedPerson.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(insertedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
// Verifies that a patch whose filter predicate does not match the document
// fails with a PreconditionFailedException rather than applying the change.
// The statements after patch() inside the try block are only reached if the
// expected exception is NOT thrown, in which case fail() aborts the test.
public void testPatchPreConditionFail() {
try {
// Predicate matches no document (lastName is never 'dummy'), so the
// service should reject the patch with a precondition failure.
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
insertedPerson = cosmosTemplate.patch(insertedPerson, operations,options);
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(insertedPerson.getAge(), patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
// Expected path: the wrapped cause must be a PreconditionFailedException
// and diagnostics must have been captured for the failed call.
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
fail();
}
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
// Same fixture as testFindWithSortAndLimit, but exercises OFFSET/LIMIT paging:
// skipping one row of the sorted result must land on "fred".
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Offset 1, limit 1 -> the middle element of the sorted set.
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
// Verifies findAll honours a descending sort supplied through the page request,
// and that diagnostics/response statistics are populated by the paged query.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Point writes populate diagnostics but not query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
// DESC order: the two NEW_FIRST_NAME entries precede FIRST_NAME.
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
// Inserts five people total and verifies the ASC firstName sort is maintained
// across a page boundary (page size 3, then the remaining 2 via nextPageable()).
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
// First page (ASC): barney, FIRST_NAME, fred.
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
// Second page holds the two NEW_FIRST_NAME entries.
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
// exists() must match with both a case-sensitive criteria and a
// case-insensitive criteria fed an upper-cased value.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
// ARRAY_CONTAINS on the hobbies list should match only the seeded TEST_PERSON.
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testContainsCriteria() {
// CONTAINING on firstName: the case-sensitive variant must miss the person whose
// first name is upper-cased ("NEW_FIRST_NAME"), while IgnoreCaseType.ALWAYS matches it too.
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Local variable, so lowerCamelCase (was TEST_PERSON_4, which reads as a constant).
Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
public void testContainsCriteria2() {
// CONTAINING against the id field: each single-character probe should match
// exactly the one person whose id embeds that digit.
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
@Test
public void testNotContainsCriteria() {
// NOT_CONTAINING on firstName, both case-sensitive and case-insensitive.
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Local variable, so lowerCamelCase (was TEST_PERSON_4, which reads as a constant).
Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
// NOT_CONTAINING against the id field: excluding one digit should return the
// other two seeded people.
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
// IS_NOT_NULL takes no values (empty list); only the seeded TEST_PERSON has a lastName.
Criteria hasLastName = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasLastName), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
// STARTS_WITH with IgnoreCaseType.ALWAYS must match even when the probe is upper-cased.
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testIsEqualCriteriaCaseSensitive() {
// IS_EQUAL with IgnoreCaseType.ALWAYS must match even when the probe is upper-cased.
// Renamed the local from the copy-pasted "nameStartsWith" to reflect the actual criteria.
Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
// STRING_EQUALS with IgnoreCaseType.ALWAYS must match even when the probe is upper-cased.
// Renamed the local from the copy-pasted "nameStartsWith" to reflect the actual criteria.
Criteria nameStringEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStringEquals), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
// BETWEEN on age with an inclusive-looking [AGE-1, AGE+1] window around the seeded value.
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
// The criteria subject uses bracketed path syntax to reach a nested property
// (first shipping address' postalCode).
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
// Map keys containing spaces must survive query generation when addressed with
// bracketed path syntax.
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
// runQuery with a generated SqlQuerySpec; domain and return type are both Person.
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testSliceQuery() {
// sliceQuery should return only the single match for FIRST_NAME and populate
// diagnostics plus response statistics (with a positive request charge).
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Point write populates diagnostics but not query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testRunSliceQuery() {
// Same as testSliceQuery but going through runSliceQuery with a raw SqlQuerySpec.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void createWithAutoscale() throws ClassNotFoundException {
// A container created for an autoscale-annotated entity should report the
// configured autoscale max throughput when read back directly from the client.
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
// A template configured with database-level manual throughput should create the
// database with exactly that many RUs, verified by reading it back via the client.
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
// Guard against block() returning null so a failure surfaces as an assertion,
// not an NPE (consistent with createWithAutoscale above).
assertNotNull(response);
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
// A template built with maxDegreeOfParallelism(20) should retain that value internally.
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query for its side effect only; the previously assigned 'count' local was unused.
maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
// assertEquals takes (expected, actual); the original had them swapped.
assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
// A template built with maxBufferedItemCount(500) should retain that value internally.
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query for its side effect only; the previously assigned 'count' local was unused.
maxBufferedItemCountCosmosTemplate.count(query, containerName);
// assertEquals takes (expected, actual); the original had them swapped.
assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
// A template built with responseContinuationTokenLimitInKb(2000) should retain that value.
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query for its side effect only; the previously assigned 'count' local was unused.
responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
// assertEquals takes (expected, actual); the original had them swapped.
assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"));
}
@Test
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept as-is since JUnit
// discovers tests reflectively and external tooling may reference the current name.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
// A template built with enableQueryMetrics(true) should retain that flag internally.
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query for its side effect only; the previously assigned 'count' local was unused.
queryMetricsEnabledCosmosTemplate.count(query, containerName);
// assertEquals takes (expected, actual); the original had them swapped.
assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
// Reflectively reads the builder's private user-agent suffix and checks it carries
// both the spring-data-cosmos marker and the project version.
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup helper: deletes the named database, tolerating 404 (absent DB).
// Any other Cosmos failure is surfaced as an assertion error.
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
// assertEquals takes (expected, actual); the original had them swapped.
assertEquals(404, ex.getStatusCode());
}
}
}
class CosmosTemplateIT {
// Shared fixture people; TEST_PERSON is (re)inserted before each test in setUp().
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the precondition-failure message from the service.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the precondition tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one call.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared mutable static — testPatchPreConditionSuccess/Fail both call
// setFilterPredicate on this instance, so those tests are order-sensitive.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once in setUp() and reused across the test class.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The Person inserted fresh for each test; carries the current _etag.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// No-op constructor; throws clause presumably left over from earlier JSON setup — TODO confirm.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
// Lazily bootstrap the shared client/template once, then empty the containers and
// seed TEST_PERSON before every test.
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate against the given database using the supplied config,
// scanning the application context for Persistent entities.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Convenience wrapper: inserts a person using its own partition-key field value.
private void insertPerson(Person person) {
cosmosTemplate.insert(person,
new PartitionKey(personInfo.getPartitionKeyFieldValue(person)));
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
// TEST_PERSON is already seeded by setUp(); a second insert must raise a
// CosmosAccessException wrapping a ConflictException (HTTP 409).
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
// A null id without auto-generation configured must be rejected.
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
// GenIdEntity's id is auto-generated, so inserting with a null id must succeed
// and return an entity with a populated id.
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
public void testFindAll() {
// Only the seeded TEST_PERSON exists; findAll must return exactly it and populate
// query diagnostics/statistics.
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
// A known id resolves to the seeded person; an unknown id yields null (not an exception).
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
// findByIds with three ids returns all three people; order is not asserted,
// only membership and size.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
// Deletes the seeded document first so the upsert takes the "insert" path,
// then verifies the returned entity reflects the new first name.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized suffix guards against accidentally matching stale data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
// Upsert with the current _etag must succeed, and the returned entity's _etag
// must match what a subsequent findById reads back.
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
public void testUpdate() {
// Upsert of an existing document with the correct _etag must succeed and
// return the updated entity; a point write populates no query statistics.
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
// FIX: the annotation was duplicated (@Test twice) — @Test is not a repeatable
// annotation, so the duplicate would not compile. One @Test is kept.
public void testPatchMultiOperations() {
// Applies the shared multi-operation patch (set/replace/add/remove/increment)
// and verifies every targeted field changed as requested.
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// The remove operation dropped one of the two shipping addresses.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
public void testPatchPreConditionSuccess() {
// A filter predicate matching the document's lastName lets the patch proceed.
// NOTE(review): mutates the shared static 'options' instance — see field declaration.
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
public void testPatchPreConditionFail() {
// A filter predicate that matches nothing must make the patch fail with a
// PreconditionFailedException wrapped in CosmosAccessException.
try {
// NOTE(review): mutates the shared static 'options' instance — see field declaration.
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
// Upserting with a stale/wrong _etag must trip optimistic concurrency: the service
// rejects with a precondition failure, and the stored document stays unmodified.
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The failed upsert must not have changed the persisted document.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert unexpectedly succeeded.
fail();
}
@Test
// deleteById removes only the targeted document. The point delete records
// diagnostics but no query statistics; the follow-up findAll records both.
// NOTE: assertion order is significant — the diagnostics utils hold shared
// state mutated by each template call.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Only the non-deleted person remains.
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
// deleteEntity removes exactly the passed entity. Diagnostics/statistics
// assertions mirror testDeleteById and depend on call ordering.
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Only the person that was not deleted remains.
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
// count(container) reflects inserts immediately; each count records
// diagnostics and query statistics with a positive request charge.
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// count(query) with an IS_EQUAL criteria matches one document, both with exact
// case (NEVER) and with case folding (ALWAYS, upper-cased probe value).
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Same match expected when the probe is upper-cased but case is ignored.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// With two documents and a page size of PAGE_SIZE_1, findAll must return a
// full non-last first page plus a one-element last page reachable via
// nextPageable(); each page fetch records diagnostics and statistics.
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// The continuation token on page1 drives the second (and final) page.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Three documents with page size PAGE_SIZE_2: first page holds TEST_PERSON and
// TEST_PERSON_2, second (last) page holds TEST_PERSON_3.
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Second fetch via the continuation token must return the remaining person.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// paginationQuery with an IS_EQUAL criteria yields a single-element last page,
// both case-sensitively and case-insensitively (upper-cased probe).
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
    // Seed three extra people sharing NEW_LAST_NAME so the sort is observable.
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    // Case-insensitive equality on the shared last name, sorted by first name ASC.
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // A limit of one must keep only the first element of the sorted order.
    query.withLimit(1);
    final List<Person> limited = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(limited.size()).isEqualTo(1);
    assertThat(limited.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
    // Seed three extra people sharing NEW_LAST_NAME for a deterministic sort.
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Skip one row, take one row: the middle element ("fred") must come back.
    query.withOffsetAndLimit(1, 1);
    final List<Person> window = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(window.size()).isEqualTo(1);
    assertThat(window.get(0).getFirstName()).isEqualTo("fred");
}
@Test
// A single page request with a DESC sort on firstName must return all three
// documents ordered NEW_FIRST_NAME, NEW_FIRST_NAME, FIRST_NAME.
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Five documents, ASC sort on firstName, page size PAGE_SIZE_3: the sort order
// must hold across the page boundary (barney, FIRST_NAME, fred | NEW_FIRST_NAME x2).
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
// The remaining two NEW_FIRST_NAME documents land on the second (last) page.
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
// exists(query) must be true for the seeded person both with exact case and
// with an upper-cased probe under ALWAYS ignore-case.
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
    // ARRAY_CONTAINS on the hobbies array must match only the seeded person.
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(hobbyCriteria);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// CONTAINING must be case-sensitive with IgnoreCaseType.NEVER and
// case-insensitive with ALWAYS.
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Renamed local (was TEST_PERSON_4): UPPER_SNAKE_CASE is reserved for
    // constants; sibling tests use lowerCamelCase locals (e.g. testPerson4).
    // The literal "NEW_FIRST_NAME" first name is intentional — it contains
    // "FIRST" only in upper case, so only the ignore-case query can reach it.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Each seeded person's id embeds a distinct digit, so a case-sensitive
    // CONTAINING query on "id" must isolate exactly one person per digit.
    final Person[] expectedByDigit = {TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3};
    for (int digit = 1; digit <= 3; digit++) {
        final Criteria idContains = Criteria.getInstance(CriteriaType.CONTAINING, "id",
            Collections.singletonList(String.valueOf(digit)), Part.IgnoreCaseType.NEVER);
        final List<Person> found = TestUtils.toList(
            cosmosTemplate.find(new CosmosQuery(idContains), Person.class, containerName));
        assertThat(found).containsExactly(expectedByDigit[digit - 1]);
    }
}
@Test
// NOT_CONTAINING must exclude matches case-sensitively with NEVER and
// case-insensitively with ALWAYS.
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Renamed local (was TEST_PERSON_4): UPPER_SNAKE_CASE is reserved for
    // constants; sibling tests use lowerCamelCase locals.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    // Case-sensitive: "li" appears only in the original person's first name.
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    // Ignore-case: "new" excludes every person whose name contains NEW/new.
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // NOT_CONTAINING a digit on "id" must return exactly the other two people.
    final Person[][] expectedByDigit = {
        {TEST_PERSON_2, TEST_PERSON_3},
        {TEST_PERSON, TEST_PERSON_3},
        {TEST_PERSON, TEST_PERSON_2}
    };
    for (int digit = 1; digit <= 3; digit++) {
        final Criteria idNotContains = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
            Collections.singletonList(String.valueOf(digit)), Part.IgnoreCaseType.NEVER);
        final List<Person> found = TestUtils.toList(
            cosmosTemplate.find(new CosmosQuery(idNotContains), Person.class, containerName));
        assertThat(found).containsExactly(expectedByDigit[digit - 1]);
    }
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    // IS_NOT_NULL takes no values; the seeded person has a last name set.
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(lastNamePresent), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
    // STARTS_WITH under ALWAYS ignore-case must match despite the upper-cased prefix.
    final String upperCasedName = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria prefixCriteria = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(upperCasedName),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(prefixCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// IS_EQUAL under ALWAYS ignore-case must match despite the upper-cased probe.
public void testIsEqualCriteriaCaseSensitive() {
    // Local renamed from the misleading copy-pasted "nameStartsWith": this
    // criteria exercises IS_EQUAL, not STARTS_WITH.
    Criteria nameEqualsIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEqualsIgnoreCase), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// STRING_EQUALS under ALWAYS ignore-case must match despite the upper-cased probe.
public void testStringEqualsCriteriaCaseSensitive() {
    // Local renamed from the misleading copy-pasted "nameStartsWith": this
    // criteria exercises STRING_EQUALS, not STARTS_WITH.
    Criteria stringEqualsIgnoreCase = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(stringEqualsIgnoreCase), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
    // BETWEEN bounds bracket the seeded person's age by one on each side.
    final Criteria ageRange = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ageRange), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    // Equality on a nested/indexed path must resolve against the first
    // shipping address of the seeded person.
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedPath = "shippingAddresses[0]['postalCode']";
    final Criteria nestedEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedPath,
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(nestedEquals), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    // A map key containing spaces must survive query-spec generation and match.
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    final String spacedKeyPath = "passportIdsByCountry['United States of America']";
    final Criteria spacedKeyEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, spacedKeyPath,
        Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(spacedKeyEquals));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
    // runQuery over a generated BETWEEN spec must return the seeded person.
    final Criteria ageRange = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageRange));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// sliceQuery with an IS_EQUAL criteria and page size PAGE_SIZE_2 must return a
// single-element slice and record diagnostics plus query statistics.
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// runSliceQuery over a generated SqlQuerySpec must behave like sliceQuery:
// one matching element, diagnostics and statistics recorded.
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Creating a container for the AutoScaleSample entity must provision autoscale
// throughput with the configured maximum RU/s.
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
// Read back the provisioned throughput directly via the async client.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Creating a fresh database with database-level manual throughput must
// provision exactly the configured RU/s.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    // Start from a clean slate so pre-existing throughput cannot skew the check.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against an NPE masking the real failure (mirrors createWithAutoscale).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Verifies that maxDegreeOfParallelism configured via CosmosConfig is
// propagated into the created CosmosTemplate.
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count result is irrelevant; the call just exercises the query path
    // with the configured option (previously stored in an unused local).
    maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
// Verifies that maxBufferedItemCount configured via CosmosConfig is
// propagated into the created CosmosTemplate.
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call exercises the configured query
    // path (was previously stored in an unused local).
    maxBufferedItemCountCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
// Verifies that responseContinuationTokenLimitInKb configured via CosmosConfig
// is propagated into the created CosmosTemplate.
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call exercises the configured query
    // path (was previously stored in an unused local).
    responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"), 2000);
}
@Test
// Verifies that enableQueryMetrics(true) is propagated into the created
// CosmosTemplate. NOTE(review): method name has a typo ("Mertics"); kept
// unchanged to avoid breaking any test-name filters.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call exercises the configured query
    // path (was previously stored in an unused local).
    queryMetricsEnabledCosmosTemplate.count(query, containerName);
    assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // The builder's user-agent suffix is private, so read it reflectively and
    // check it advertises both the Spring Data suffix and the project version.
    final Method suffixAccessor = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    suffixAccessor.setAccessible(true);
    final String suffix = (String) suffixAccessor.invoke(cosmosClientBuilder);
    assertThat(suffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(suffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Deletes the given database if it is present. A 404 (not found) response
 * from the service means the database was already absent and is the only
 * tolerated failure; anything else fails the assertion.
 */
private void deleteDatabaseIfExists(String dbName) {
    final CosmosAsyncDatabase target = client.getDatabase(dbName);
    try {
        target.delete().block();
    } catch (CosmosException notFound) {
        assertEquals(notFound.getStatusCode(), 404);
    }
}
} |
wonder whether the age verification can also be part of StepVerifier flow -> `expectNextMatches(person -> person.getAge() == PATCH_AGE_1)` | public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, operations, null);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextCount(1).verifyComplete();
assertEquals(patchedPerson.block().getAge(), PATCH_AGE_1);
} | assertEquals(patchedPerson.block().getAge(), PATCH_AGE_1); | public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
} | class ReactiveCosmosTemplateIT {
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
private static AzureKeyCredential azureKeyCredential;
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
public void setUp() throws ClassNotFoundException {
// Lazily initialize the shared client/template exactly once for the class;
// cleanup() restores the primary key in case a test swapped credentials.
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Every test starts with empty containers and exactly one seeded person.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
/**
 * Builds a ReactiveCosmosTemplate over the shared async client for the given
 * database, scanning the application context for persistent entity types.
 */
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    final CosmosMappingContext context = new CosmosMappingContext();
    final EntityScanner scanner = new EntityScanner(this.applicationContext);
    context.setInitialEntitySet(scanner.scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    return new ReactiveCosmosTemplate(factory, config, converter);
}
@After
public void cleanup() {
azureKeyCredential.update(cosmosDbKey);
}
// Re-inserting the person already seeded by setUp() must fail with a conflict
// surfaced as CosmosAccessException wrapping a ConflictException.
@Test
public void testInsertDuplicateId() {
final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
StepVerifier.create(insertMono)
.expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Point-reads the seeded person by id and asserts full equality; also checks
// that diagnostics were captured and the request charge is positive.
@Test
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
person, new PartitionKey(person.getLastName()));
StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Inserting an entity with a null id must auto-generate one.
// Consistency fix: verify through StepVerifier like the sibling tests instead
// of blocking the Mono and asserting afterwards.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity entity = new GenIdEntity(null, "foo");
    final Mono<GenIdEntity> insertedEntityMono = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
        entity, null);
    StepVerifier.create(insertedEntityMono)
        .expectNextMatches(inserted -> inserted.getId() != null)
        .verifyComplete();
}
@Test
public void testUpsert() {
final Person p = TEST_PERSON_2;
p.set_etag(insertedPerson.get_etag());
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Applies the multi-operation patch (set/replace/add/increment) and verifies
// every touched field on the stored document.
// Fixes: removed the duplicated @Test annotation (illegal — @Test is not a
// repeatable annotation), and the document is now read back once instead of
// calling patchedPerson.block() four times, which re-executed the findById
// query for every assertion.
@Test
public void testPatchMultiOperations() {
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    final Person patchedPerson =
        cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
    assertNotNull(patchedPerson);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
// A patch whose filter predicate matches no document must fail with a
// precondition-failed error.
// Fix: use a method-local options object instead of mutating the shared static
// 'options' field, which leaked the filter predicate into any other test that
// reused the same instance.
@Test
public void testPatchPreConditionFail() {
    final CosmosPatchItemRequestOptions failingOptions = new CosmosPatchItemRequestOptions();
    failingOptions.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
    Mono<Person> person = cosmosTemplate.patch(insertedPerson, operations, failingOptions);
    StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
        ((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
// Upserting with a stale/incorrect _etag must be rejected by the service's
// optimistic-concurrency check, leaving the stored document unmodified.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
// Expected path: the precondition failure surfaces as a CosmosException
// whose message contains the "is not met" marker; the document must still
// carry the original first name.
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
// Reaching this point means the upsert succeeded despite the wrong etag.
fail();
}
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testUpsertWithContainerName() {
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(Person.class.getSimpleName(), p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByIdBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testFind() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCount() {
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
}
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
final Flux<Person> flux = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = cosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
final Flux<Person> people = cosmosTemplate.find(new CosmosQuery(criteria), Person.class, containerName);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
String ivoryCoastPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
String subjectWithSpecialChars = "passportIdsByCountry[\"Côte d'Ivoire\"]";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpecialChars,
Collections.singletonList(ivoryCoastPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Flux<Person> people = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Drops the named database when it exists; a 404 response means it was
 * already gone and is the only failure accepted by this helper.
 */
private void deleteDatabaseIfExists(String dbName) {
    final CosmosAsyncDatabase existing = client.getDatabase(dbName);
    try {
        existing.delete().block();
    } catch (CosmosException deleteFailure) {
        assertEquals(deleteFailure.getStatusCode(), 404);
    }
}
} | class ReactiveCosmosTemplateIT {
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Immutable fixture persons; all share the same last name so they land in the same partition.
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the message of a precondition-failed (HTTP 412) CosmosException.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
// JSON form of the replacement passport map, used as the value of a /passportIdsByCountry patch.
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch reused by the precondition tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one request.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared mutable options object — tests that call setFilterPredicate on it
// couple their outcome to execution order; consider a per-test instance.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
// Shared across all tests in the class; lazily initialized once in setUp().
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
// Live credential; secondary-key tests swap it and cleanup() restores the primary key.
private static AzureKeyCredential azureKeyCredential;
// The person seeded before each test (holds the server-assigned etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
public void setUp() throws ClassNotFoundException {
// One-time lazy initialization of the shared static client/template; order matters:
// the credential must be attached to the builder before the client is created.
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Per-test: reset the containers and seed exactly one person so count/find tests start clean.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
/**
 * Builds a ReactiveCosmosTemplate over the shared async client for the given database,
 * scanning the application context for @Persistent entities.
 */
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new ReactiveCosmosTemplate(factory, config, converter);
}
@After
public void cleanup() {
// Restore the primary key in case a secondary-key test swapped the live credential.
azureKeyCredential.update(cosmosDbKey);
}
@Test
public void testInsertDuplicateId() {
    // Re-inserting the person seeded in setUp() must surface a conflict (HTTP 409).
    final Mono<Person> duplicateInsert = cosmosTemplate.insert(TEST_PERSON,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
    StepVerifier.create(duplicateInsert)
            .expectErrorMatches(throwable -> throwable instanceof CosmosAccessException
                    && ((CosmosAccessException) throwable).getCosmosException() instanceof ConflictException)
            .verify();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByID() {
    // Point read of the seeded person; diagnostics and request charge must be populated.
    final Mono<Person> personMono = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
    StepVerifier.create(personMono)
            .consumeNextWith(found -> Assert.assertEquals(found, TEST_PERSON))
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIDBySecondaryKey() {
    // Swap the live credential to the secondary account key; reads must still succeed.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Mono<Person> personMono = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
    StepVerifier.create(personMono)
            .consumeNextWith(found -> {
                Assert.assertEquals(found.getFirstName(), TEST_PERSON.getFirstName());
                Assert.assertEquals(found.getLastName(), TEST_PERSON.getLastName());
            })
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAll() {
    // Only the person seeded in setUp() should exist.
    final Flux<Person> allPersons = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(allPersons)
            .expectNextCount(1)
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIdWithContainerName() {
    // Same as testFindByID but spelling out the container-name overload.
    final Mono<Person> byId = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
    StepVerifier.create(byId)
            .consumeNextWith(found -> Assert.assertEquals(found, TEST_PERSON))
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testInsert() {
    final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON_3,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    StepVerifier.create(insertMono).expectNext(TEST_PERSON_3).verifyComplete();
    // A point write carries diagnostics but no query response statistics.
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertBySecondaryKey() {
    // Writes must also work with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON_3,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    StepVerifier.create(insertMono).expectNext(TEST_PERSON_3).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertWithContainerName() {
    // Insert through the explicit container-name overload.
    final Mono<Person> insertMono = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    StepVerifier.create(insertMono).expectNext(TEST_PERSON_2).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    // A null id on Person (id field presumably not auto-generated) must be rejected.
    final Person personWithoutId = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Mono<Person> insertMono = cosmosTemplate.insert(Person.class.getSimpleName(),
            personWithoutId, new PartitionKey(personWithoutId.getLastName()));
    StepVerifier.create(insertMono).verifyError(CosmosAccessException.class);
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // GenIdEntity starts with a null id; the store should assign one on insert.
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved = cosmosTemplate
            .insert(GenIdEntity.class.getSimpleName(), toInsert, null)
            .block();
    Assertions.assertThat(saved).isNotNull();
    Assertions.assertThat(saved.getId()).isNotNull();
}
@Test
public void testUpsert() {
    final Person person = TEST_PERSON_2;
    // Carry over the etag of the row seeded in setUp so the upsert passes the optimistic-lock check.
    person.set_etag(insertedPerson.get_etag());
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(person)).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testPatchMultiOperations() {
    // FIX: the method carried two @Test annotations; @Test is not repeatable, so the
    // duplicate does not compile. One annotation is sufficient.
    // Apply set/replace/add/remove/increment in a single patch and verify every effect.
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(),
            new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    final Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
    // Guard against a null read so a missing document fails the assertion, not with an NPE.
    assertNotNull(patchedPerson);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    // One shipping address was removed by the /shippingAddresses/1 remove operation.
    assertEquals(patchedPerson.getShippingAddresses().size(), 1);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
public void testPatchPreConditionSuccess() {
    // FIX: previously this mutated the shared static `options` field, so the filter
    // predicate leaked across tests and results depended on execution order.
    // A method-local options instance keeps the test self-contained.
    final CosmosPatchItemRequestOptions localOptions = new CosmosPatchItemRequestOptions();
    // Predicate matches the seeded person, so the patch must be applied.
    localOptions.setFilterPredicate("FROM person p WHERE p.lastName = '" + LAST_NAME + "'");
    final Mono<Person> patchedPerson = cosmosTemplate.patch(insertedPerson.getId(),
            new PartitionKey(insertedPerson.getLastName()), Person.class, operations, localOptions);
    StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
}
@Test
public void testPatchPreConditionFail() {
    // FIX: use a method-local options object instead of mutating the shared static
    // `options` field, which made this test order-dependent with its sibling.
    final CosmosPatchItemRequestOptions localOptions = new CosmosPatchItemRequestOptions();
    // Predicate matches no document, so the service must answer 412 (precondition failed).
    localOptions.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
    final Mono<Person> person = cosmosTemplate.patch(insertedPerson.getId(),
            new PartitionKey(insertedPerson.getLastName()), Person.class, operations, localOptions);
    StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException
            && ((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
// Upserting with a stale/wrong etag must be rejected (precondition failed) and
// must leave the stored document unmodified.
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
// Expected path: the wrapped exception carries the 412 precondition message.
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
// The stored person must still have the original first name.
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
// Reaching here means the upsert succeeded despite the wrong etag — that is a failure.
fail();
}
@Test
public void testUpsertBySecondaryKey() {
    // Upserts must also work with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Person person = TEST_PERSON_2;
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(person)).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testUpsertWithContainerName() {
    // Upsert through the explicit container-name overload.
    final Person person = TEST_PERSON_2;
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(Person.class.getSimpleName(), person))
            .expectNextCount(1)
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testDeleteById() {
// Seed a second person so the container holds two documents.
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Confirm both documents are present before the delete.
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Delete by id + partition key; a point operation, hence no response statistics.
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Only the person seeded in setUp() should remain.
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByEntity() {
    // Seed a second person, then delete it by passing the entity itself.
    final Person toDelete = cosmosTemplate.insert(TEST_PERSON_4,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    // Two documents exist before the delete.
    Flux<Person> allPersons = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(allPersons).expectNextCount(2).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    final Mono<Void> deletion = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), toDelete);
    StepVerifier.create(deletion).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    // Only the person seeded in setUp() remains.
    allPersons = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(allPersons).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByIdBySecondaryKey() {
    // Deletes must also work with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    cosmosTemplate.insert(TEST_PERSON_4,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
    Flux<Person> allPersons = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(allPersons).expectNextCount(2).verifyComplete();
    final Mono<Void> deletion = cosmosTemplate.deleteById(Person.class.getSimpleName(),
            TEST_PERSON_4.getId(),
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
    StepVerifier.create(deletion).verifyComplete();
    allPersons = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(allPersons).expectNextCount(1).verifyComplete();
}
@Test
public void testFind() {
    // Case-sensitive equality on firstName matches the single seeded person.
    final CosmosQuery exactQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
            Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER));
    StepVerifier.create(cosmosTemplate.find(exactQuery, Person.class, Person.class.getSimpleName()))
            .expectNextCount(1)
            .verifyComplete();
    // Case-insensitive equality must still match when the probe value is upper-cased.
    final CosmosQuery ignoreCaseQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
            Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS));
    StepVerifier.create(cosmosTemplate.find(ignoreCaseQuery, Person.class, Person.class.getSimpleName()))
            .expectNextCount(1)
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testExists() {
    // Existence check with a case-sensitive match on firstName.
    final CosmosQuery exactQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
            Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER));
    StepVerifier.create(cosmosTemplate.exists(exactQuery, Person.class, containerName))
            .expectNext(true)
            .verifyComplete();
    // And with a case-insensitive match on the upper-cased value.
    final CosmosQuery ignoreCaseQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
            Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS));
    StepVerifier.create(cosmosTemplate.exists(ignoreCaseQuery, Person.class, containerName))
            .expectNext(true)
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCount() {
    // Exactly one document was seeded in setUp().
    StepVerifier.create(cosmosTemplate.count(containerName))
            .expectNext(1L)
            .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountBySecondaryKey() {
    // Counting must also work with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.count(containerName))
            .expectNext(1L)
            .verifyComplete();
}
@Test
public void testInvalidSecondaryKey() {
    // An unparseable key must surface as a CosmosAccessException on the first read.
    azureKeyCredential.update("Invalid secondary key");
    final Mono<Person> byId = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
    StepVerifier.create(byId)
            .expectError(CosmosAccessException.class)
            .verify();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testRunQueryWithSimpleReturnType() {
    // BETWEEN on age around the seeded value must match the one seeded person.
    final Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age",
            Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
            .expectNextCount(1)
            .verifyComplete();
}
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
    // AuditableEntity carries LocalDateTime audit fields; runQuery must deserialize them.
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
            Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
            .expectNextCount(1)
            .verifyComplete();
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    // Equality on a nested (indexed + bracketed) property path must translate correctly.
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedPropertyPath = "shippingAddresses[0]['postalCode']";
    final Criteria nestedEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedPropertyPath,
            Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    StepVerifier.create(cosmosTemplate.find(new CosmosQuery(nestedEquals), Person.class, containerName))
            .expectNextCount(1)
            .verifyComplete();
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
    // Property path contains an accent and an apostrophe; the generated SQL must escape it.
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
    final String specialCharPath = "passportIdsByCountry[\"Côte d'Ivoire\"]";
    final Criteria specialCharEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, specialCharPath,
            Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(specialCharEquals));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
            .expectNextCount(1)
            .verifyComplete();
}
@Test
public void createWithAutoscale() {
    // AutoScaleSample is annotated for autoscale throughput; verify the provisioned max RU/s.
    final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
            new CosmosEntityInformation<>(AutoScaleSample.class);
    final CosmosContainerResponse containerResponse = cosmosTemplate
            .createContainerIfNotExists(autoScaleSampleInfo)
            .block();
    assertNotNull(containerResponse);
    final ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
            .getContainer(autoScaleSampleInfo.getContainerName())
            .readThroughput()
            .block();
    assertNotNull(throughput);
    assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
            throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    // Create a fresh database with manual throughput and verify the provisioned RU/s.
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
            .enableDatabaseThroughput(false, expectedRequestUnits)
            .build();
    final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
            new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // FIX: guard the read before dereferencing; a null response previously surfaced as an
    // NPE instead of a clear assertion failure.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    // Build a template with maxDegreeOfParallelism=20 and verify both the query and the setting.
    final ReactiveCosmosTemplate parallelTemplate = createReactiveCosmosTemplate(
            CosmosConfig.builder().maxDegreeOfParallelism(20).build(), TestConstants.DB_NAME);
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
            Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(parallelTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
            .expectNextCount(1)
            .verifyComplete();
    // The private field is only reachable via reflection.
    assertEquals((int) ReflectionTestUtils.getField(parallelTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    // Build a template with maxBufferedItemCount=500 and verify both the query and the setting.
    final ReactiveCosmosTemplate bufferedTemplate = createReactiveCosmosTemplate(
            CosmosConfig.builder().maxBufferedItemCount(500).build(), TestConstants.DB_NAME);
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
            Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(bufferedTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
            .expectNextCount(1)
            .verifyComplete();
    // The private field is only reachable via reflection.
    assertEquals((int) ReflectionTestUtils.getField(bufferedTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    // Build a template with a 2000 KB continuation-token limit and verify query + setting.
    final ReactiveCosmosTemplate limitedTemplate = createReactiveCosmosTemplate(
            CosmosConfig.builder().responseContinuationTokenLimitInKb(2000).build(), TestConstants.DB_NAME);
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
            Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(limitedTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
            .expectNextCount(1)
            .verifyComplete();
    // The private field is only reachable via reflection.
    assertEquals((int) ReflectionTestUtils.getField(limitedTemplate,
            "responseContinuationTokenLimitInKb"), 2000);
}
@Test
// NOTE(review): method name misspells "Metrics"; kept as-is to avoid churn in test reports.
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
    // Build a template with query metrics enabled and verify both the query and the flag.
    final ReactiveCosmosTemplate metricsTemplate = createReactiveCosmosTemplate(
            CosmosConfig.builder().enableQueryMetrics(true).build(), TestConstants.DB_NAME);
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
            Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(metricsTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
            .expectNextCount(1)
            .verifyComplete();
    // The private flag is only reachable via reflection.
    assertEquals((boolean) ReflectionTestUtils.getField(metricsTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // CosmosClientBuilder exposes no public getter for the suffix, so read it reflectively.
    final Method suffixAccessor = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    suffixAccessor.setAccessible(true);
    final String userAgentSuffix = (String) suffixAccessor.invoke(cosmosClientBuilder);
    Assertions.assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
    Assertions.assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Deletes the given database, treating "not found" (HTTP 404) as success.
 *
 * @param dbName name of the database to remove
 */
private void deleteDatabaseIfExists(String dbName) {
    final CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // FIX: JUnit's assertEquals takes (expected, actual); the original call had the
        // arguments swapped, which produces a misleading message on unexpected status codes.
        assertEquals(404, ex.getStatusCode());
    }
}
} |
Instead of calling .block() for each assert, we can either make this validation part of the StepVerifier flow or just call .block() once (to make it a little more efficient); otherwise it will trigger multiple calls | public void testPatchMultiOperations() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextCount(1).verifyComplete();
assertEquals(patchedPerson.block().getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.block().getHobbies(),PATCH_HOBBIES);
assertEquals(patchedPerson.block().getFirstName(), PATCH_FIRST_NAME);
assertEquals(patchedPerson.block().getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
} | assertEquals(patchedPerson.block().getAge().intValue(), (AGE + PATCH_AGE_INCREMENT)); | public void testPatchMultiOperations() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(),PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
} | class ReactiveCosmosTemplateIT {
// Immutable fixture persons; all share the same last name so they land in the same partition.
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the message of a precondition-failed (HTTP 412) CosmosException.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
// JSON form of the replacement passport map, used as the value of a /passportIdsByCountry patch.
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch reused by the precondition tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising replace/add/increment in one request.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared mutable options object — tests that call setFilterPredicate on it
// couple their outcome to execution order; consider a per-test instance.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
// Shared across all tests in the class; lazily initialized once in setUp().
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
// Live credential; secondary-key tests swap it and cleanup() restores the primary key.
private static AzureKeyCredential azureKeyCredential;
// The person seeded before each test (holds the server-assigned etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
public void setUp() throws ClassNotFoundException {
// One-time lazy initialization of the shared static client/template; order matters:
// the credential must be attached to the builder before the client is created.
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Per-test: reset the containers and seed exactly one person so count/find tests start clean.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
/**
 * Builds a ReactiveCosmosTemplate over the shared async client for the given database,
 * scanning the application context for @Persistent entities.
 */
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new ReactiveCosmosTemplate(factory, config, converter);
}
@After
public void cleanup() {
// Restore the primary key in case a secondary-key test swapped the live credential.
azureKeyCredential.update(cosmosDbKey);
}
@Test
public void testInsertDuplicateId() {
    // Re-inserting the person seeded in setUp() must surface a conflict (HTTP 409).
    final Mono<Person> duplicateInsert = cosmosTemplate.insert(TEST_PERSON,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
    StepVerifier.create(duplicateInsert)
            .expectErrorMatches(throwable -> throwable instanceof CosmosAccessException
                    && ((CosmosAccessException) throwable).getCosmosException() instanceof ConflictException)
            .verify();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findById on the seeded document returns it, recording diagnostics and a positive request charge.
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same lookup must work after switching the live credential to the secondary key.
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findAll sees exactly the single document seeded by setUp().
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findById with an explicit container name behaves the same as the entity-derived lookup.
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Point-write: insert emits the saved entity; diagnostics are recorded but no query statistics.
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Insert must also succeed after switching to the secondary key.
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Insert with an explicit container name emits the saved entity.
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Inserting an entity with a null id must fail when the id field is not auto-generated.
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    final Person personWithoutId = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final PartitionKey partitionKey = new PartitionKey(personWithoutId.getLastName());
    StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), personWithoutId, partitionKey))
        .verifyError(CosmosAccessException.class);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// An entity whose id column is auto-generated receives a non-null id on insert.
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity entityWithoutId = new GenIdEntity(null, "foo");
    final GenIdEntity savedEntity = cosmosTemplate
        .insert(GenIdEntity.class.getSimpleName(), entityWithoutId, null)
        .block();
    assertThat(savedEntity).isNotNull();
    assertThat(savedEntity.getId()).isNotNull();
}
@Test
// Upsert with a matching etag and modified hobbies succeeds; write operations record
// diagnostics but no query statistics.
public void testUpsert() {
    final Person person = TEST_PERSON_2;
    person.set_etag(insertedPerson.get_etag());
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(person)).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Applies the patch operations (replace /age with PATCH_AGE_1) and verifies the
// persisted document reflects the new age.
public void testPatch() {
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, operations, null);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    final Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
    // Assert inside StepVerifier: the original verified the Mono and then called block()
    // on it again, re-executing the query (and risking an NPE on an empty Mono), and
    // passed assertEquals arguments in (actual, expected) order.
    StepVerifier.create(patchedPerson)
        .expectNextMatches(person -> person.getAge() == PATCH_AGE_1)
        .verifyComplete();
}
// NOTE: removed a duplicated @Test annotation — @Test is not a repeatable annotation,
// so declaring it twice on one method is a compile error.
@Test
// A patch whose filter predicate matches no document must fail with
// PreconditionFailedException wrapped in CosmosAccessException.
public void testPatchPreConditionFail() {
    options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
    Mono<Person> person = cosmosTemplate.patch(insertedPerson, operations, options);
    StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
        ((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
@Test
// Upserting with a stale etag must be rejected by optimistic concurrency and must leave
// the stored document unmodified; reaching the end of the try block without an exception
// is a test failure.
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
// Expected path: precondition failure from the etag mismatch.
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
// Verify the rejected write did not change the persisted document.
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
// No exception means the optimistic lock did not kick in.
fail();
}
@Test
// Upsert must also succeed after switching the credential to the secondary key.
public void testUpsertBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Person person = TEST_PERSON_2;
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(person)).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Upsert with an explicit container name behaves like the entity-derived variant.
public void testUpsertWithContainerName() {
    final Person person = TEST_PERSON_2;
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(Person.class.getSimpleName(), person))
        .expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Inserts a second document, confirms two exist, deletes one by id, and confirms one
// remains. Diagnostics/statistics are asserted after each template call, so statement
// order is significant.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same shape as testDeleteById but deletes via the full entity (deleteEntity), which
// requires the inserted instance so its etag/partition key are available.
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Delete-by-id must also work after switching to the secondary key.
public void testDeleteByIdBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
// find() matches by firstName both case-sensitively and case-insensitively, recording
// diagnostics and a positive request charge.
public void testFind() {
    final CosmosQuery exactQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER));
    StepVerifier.create(cosmosTemplate.find(exactQuery, Person.class, Person.class.getSimpleName()))
        .expectNextCount(1).verifyComplete();
    // Same lookup with the name upper-cased, relying on IgnoreCaseType.ALWAYS.
    final CosmosQuery caseInsensitiveQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS));
    StepVerifier.create(cosmosTemplate.find(caseInsensitiveQuery, Person.class, Person.class.getSimpleName()))
        .expectNextCount(1).verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// exists() returns true for the seeded firstName, with and without case folding.
public void testExists() {
    final CosmosQuery exactQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER));
    StepVerifier.create(cosmosTemplate.exists(exactQuery, Person.class, containerName))
        .expectNext(true).verifyComplete();
    final CosmosQuery caseInsensitiveQuery = new CosmosQuery(Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS));
    StepVerifier.create(cosmosTemplate.exists(caseInsensitiveQuery, Person.class, containerName))
        .expectNext(true).verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// count() over the container reports the single seeded document.
public void testCount() {
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// count() must also work after switching to the secondary key.
public void testCountBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
}
@Test
// Switching to a bogus key must make the next operation fail with CosmosAccessException.
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// runQuery with a generated BETWEEN-on-age spec returns the seeded person.
public void testRunQueryWithSimpleReturnType() {
    final Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
        .expectNextCount(1).verifyComplete();
}
@Test
// runQuery deserializes an entity whose auditing fields are LocalDateTime values.
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
    final AuditableEntity savedEntity = new AuditableEntity();
    savedEntity.setId(UUID.randomUUID().toString());
    auditableRepository.save(savedEntity);
    final Criteria idEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
        Collections.singletonList(savedEntity.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(idEquals));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
        .expectNextCount(1).verifyComplete();
}
@Test
// Equality criteria may target a nested property via bracketed path syntax.
public void testFindWithEqualCriteriaContainingNestedProperty() {
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final Criteria nestedCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL,
        "shippingAddresses[0]['postalCode']",
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    StepVerifier.create(cosmosTemplate.find(new CosmosQuery(nestedCriteria), Person.class, containerName))
        .expectNextCount(1).verifyComplete();
}
@Test
// Criteria subjects may contain quotes and non-ASCII characters (map key with apostrophe).
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
    final Criteria specialCharCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL,
        "passportIdsByCountry[\"Côte d'Ivoire\"]",
        Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(specialCharCriteria));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
        .expectNextCount(1).verifyComplete();
}
@Test
// Creating a container for an autoscale-annotated entity provisions the configured
// autoscale max throughput.
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
// Read back the provisioned throughput directly through the SDK client.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Creates a fresh database configured with manual throughput and verifies the
// provisioned RU/s matches the requested value.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against block() yielding null before dereferencing, matching the
    // assertNotNull pattern used by createWithAutoscale.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// A template built with maxDegreeOfParallelism=20 runs queries and carries the setting
// (verified reflectively, since the field has no accessor).
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
// A template built with maxBufferedItemCount=500 runs queries and carries the setting
// (verified reflectively).
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
// A template built with responseContinuationTokenLimitInKb=2000 runs queries and carries
// the setting (verified reflectively).
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
// A template built with enableQueryMetrics(true) runs queries and carries the flag
// (verified reflectively). NOTE(review): method name has a typo ("Mertics"); left as-is
// to avoid changing the public test name.
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
// Reflectively reads the builder's private user-agent suffix and checks it carries the
// spring-data marker and the project version.
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    final Method suffixAccessor = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    suffixAccessor.setAccessible(true);
    final String suffix = (String) suffixAccessor.invoke(cosmosClientBuilder);
    assertThat(suffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(suffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort delete of the given database: a 404 (database absent) is expected and
// tolerated; any other Cosmos status fails the assertion.
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit's assertEquals takes (expected, actual); the original had the arguments
        // swapped, producing a misleading failure message on mismatch.
        assertEquals(404, ex.getStatusCode());
    }
}
}
class ReactiveCosmosTemplateIT {
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the optimistic-concurrency failure message.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-field patch (replace /age) shared by the patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared request options; tests mutate its filter predicate.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
// Secondary account key; swapped in by the *BySecondaryKey tests.
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
// Primary account key; cleanup() restores it after each test.
private String cosmosDbKey;
// Shared client/template/state, created lazily once in setUp().
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
// Mutable credential wrapper so tests can switch keys without rebuilding the client.
private static AzureKeyCredential azureKeyCredential;
// The TEST_PERSON document inserted by setUp().
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
// Lazily builds the shared client/template on first run, then resets the containers and
// seeds one TEST_PERSON document before every test.
public void setUp() throws ClassNotFoundException {
// One-time static init; the credential must be set on the builder before the client is created.
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Per-test: empty the containers and insert the canonical test document.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
// Builds a ReactiveCosmosTemplate for the given config and database name, scanning the
// application context for @Persistent entities to seed the mapping context.
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    return new ReactiveCosmosTemplate(factory, config, new MappingCosmosConverter(context, null));
}
@After
// Restores the primary key after each test, undoing any credential swap a test performed.
public void cleanup() {
azureKeyCredential.update(cosmosDbKey);
}
@Test
// Inserting a document whose id already exists must surface a ConflictException wrapped
// in CosmosAccessException, and still record diagnostics.
public void testInsertDuplicateId() {
final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
StepVerifier.create(insertMono)
.expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findById on the seeded document returns it, recording diagnostics and a positive request charge.
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same lookup must work after switching the live credential to the secondary key.
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findAll sees exactly the single document seeded by setUp().
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findById with an explicit container name behaves the same as the entity-derived lookup.
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Point-write: insert emits the saved entity; diagnostics are recorded but no query statistics.
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Insert must also succeed after switching to the secondary key.
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Insert with an explicit container name emits the saved entity.
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Inserting an entity whose id is null (and not auto-generated) must error
// with CosmosAccessException rather than silently generating an id.
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
person, new PartitionKey(person.getLastName()));
StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// An entity with a null id and an auto-generated id column should come back
// from insert with a generated, non-null id.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    GenIdEntity saved = cosmosTemplate
        .insert(GenIdEntity.class.getSimpleName(), toInsert, null)
        .block();
    assertThat(saved).isNotNull();
    assertThat(saved.getId()).isNotNull();
}
// Upsert an existing entity (matching etag) after mutating its hobbies.
// NOTE(review): this mutates the shared static fixture TEST_PERSON_2 in place;
// later tests in the same JVM see the extra hobby — confirm intended.
@Test
public void testUpsert() {
final Person p = TEST_PERSON_2;
// Reuse the etag of the freshly-inserted document so the upsert's
// precondition (if-match) succeeds.
p.set_etag(insertedPerson.get_etag());
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Patch the inserted person (replace /age), then read the document back and
// verify the persisted age matches PATCH_AGE_1.
@Test
public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
}
// Patch with a filter predicate that matches the document (precondition met)
// should apply the operations; verified via the replaced age.
// Fix: the original carried a duplicated @Test annotation, which is a compile
// error (@Test is not a repeatable annotation) — one occurrence removed.
@Test
public void testPatchPreConditionSuccess() {
    options.setFilterPredicate("FROM person p WHERE p.lastName = '" + LAST_NAME + "'");
    Mono<Person> patchedPerson = cosmosTemplate.patch(insertedPerson.getId(),
        new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
    StepVerifier.create(patchedPerson)
        .expectNextMatches(person -> person.getAge() == PATCH_AGE_1)
        .verifyComplete();
}
// Patch with a filter predicate that matches nothing must fail with a
// CosmosAccessException wrapping PreconditionFailedException (HTTP 412).
@Test
public void testPatchPreConditionFail() {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Mono<Person> person = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
// Upserting with a bogus etag must be rejected by optimistic concurrency:
// the service returns "precondition not met" and the stored document is
// untouched. The early return inside catch plus trailing fail() ensures the
// test fails if no exception was thrown.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
// The document must still carry the originally-inserted first name.
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
fail();
}
// Same upsert flow as testUpsert, authenticated with the secondary key.
// NOTE(review): also mutates shared static TEST_PERSON_2's hobbies in place.
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Upsert through the explicit container-name overload after mutating the
// entity's hobbies; a point upsert records diagnostics but no query
// response statistics.
@Test
public void testUpsertWithContainerName() {
    final Person person = TEST_PERSON_2;
    final ArrayList<String> updatedHobbies = new ArrayList<>(person.getHobbies());
    updatedHobbies.add("more code");
    person.setHobbies(updatedHobbies);
    StepVerifier.create(cosmosTemplate.upsert(Person.class.getSimpleName(), person))
        .expectNextCount(1)
        .verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Insert a second person, confirm two documents exist, delete one by id and
// partition key, then confirm exactly one remains. Diagnostics assertions
// distinguish point operations (no response statistics) from queries
// (statistics with a positive request charge).
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same shape as testDeleteById but deletes via the entity instance
// (deleteEntity), which carries id, partition key and etag.
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Delete-by-id round trip (insert, count 2, delete, count 1) authenticated
// with the secondary account key.
@Test
public void testDeleteByIdBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    cosmosTemplate.insert(TEST_PERSON_4,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
    StepVerifier.create(cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class))
        .expectNextCount(2)
        .verifyComplete();
    final Mono<Void> deletion = cosmosTemplate.deleteById(Person.class.getSimpleName(),
        TEST_PERSON_4.getId(),
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
    StepVerifier.create(deletion).verifyComplete();
    StepVerifier.create(cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class))
        .expectNextCount(1)
        .verifyComplete();
}
// find() with an IS_EQUAL criteria on firstName, both case-sensitive and
// case-insensitive (uppercased input + IgnoreCaseType.ALWAYS must still match).
@Test
public void testFind() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// exists() with an IS_EQUAL criteria on firstName, both case-sensitive and
// case-insensitive, must report true; query diagnostics are recorded.
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// count() over the container sees exactly the one document inserted during
// setup; the count query records diagnostics and a positive request charge.
@Test
public void testCount() {
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// count() must also succeed when authenticated with the secondary key.
@Test
public void testCountBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
}
// A garbage key must surface as CosmosAccessException on the next request;
// diagnostics are still captured for the failed call.
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// runQuery with a BETWEEN criteria on age (AGE-1 .. AGE+1) should match the
// single setup person.
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
final Flux<Person> flux = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
// runQuery round-trips an entity whose type carries LocalDateTime fields
// (auditing timestamps), exercising temporal deserialization.
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = cosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
// find() should accept a criteria subject that is a nested/indexed property
// path ("shippingAddresses[0]['postalCode']").
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    final String postalCode = ADDRESSES.get(0).getPostalCode();
    final String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
    final Criteria nestedCriteria = Criteria.getInstance(
        CriteriaType.IS_EQUAL,
        subjectWithNestedProperty,
        Collections.singletonList(postalCode),
        Part.IgnoreCaseType.NEVER);
    StepVerifier.create(cosmosTemplate.find(new CosmosQuery(nestedCriteria), Person.class, containerName))
        .expectNextCount(1)
        .verifyComplete();
}
// runQuery with a criteria subject containing quotes and non-ASCII characters
// (map key "Côte d'Ivoire") must be escaped correctly by the query generator.
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
String ivoryCoastPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
String subjectWithSpecialChars = "passportIdsByCountry[\"Côte d'Ivoire\"]";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpecialChars,
Collections.singletonList(ivoryCoastPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Flux<Person> people = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
// Creating a container for an autoscale-annotated entity should provision it
// with the configured autoscale max throughput, verified via readThroughput.
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
// A template configured with database-level manual throughput should create
// its database provisioned at the requested RU/s.
// Fix: assert the ThroughputResponse is non-null before dereferencing it, so
// a missing response fails with a clear assertion instead of an NPE (matches
// the guard used in createWithAutoscale).
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
    // Start from a clean slate so the throughput below is what gets provisioned.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
// A template built with maxDegreeOfParallelism(20) should run queries and
// carry the setting; verified both by a query round-trip and reflectively.
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
// Reflectively confirm the config value actually reached the template.
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
// Same pattern as queryWithMaxDegreeOfParallelism, for maxBufferedItemCount(500).
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
// Same pattern as the other config tests, for responseContinuationTokenLimitInKb(2000).
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
// Same pattern as the other config tests, for enableQueryMetrics(true).
// NOTE(review): method name has a typo ("Mertics") — kept as-is since renaming
// a test is cosmetic, but worth fixing alongside its CI references.
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
// The client builder's user-agent suffix must advertise the Spring Data
// Cosmos suffix and project version; read via reflection because the getter
// is not public on CosmosClientBuilder.
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort teardown helper: delete the database if present; a 404 from
// the service means it did not exist and is tolerated.
// Fix: JUnit's assertEquals signature is (expected, actual) — the original
// passed them reversed, producing a misleading message on failure.
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        assertEquals(404, ex.getStatusCode());
    }
}
} |
No this is from the old tests before they were updated. @TheovanKraay please remove. | public void testPatch() {
insertedPerson = cosmosTemplate.patch(insertedPerson, operations, null);
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(insertedPerson.getAge(), PATCH_AGE_1);
} | assertEquals(insertedPerson.getAge(), PATCH_AGE_1); | public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
} | class CosmosTemplateIT {
// --- Shared fixtures -----------------------------------------------------
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fragment of the service's HTTP 412 "precondition ... is not met" message.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single replace(/age) used by the basic patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// NOTE(review): instance-level and non-final, unlike `operations` — confirm intended.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared static request options are mutated via setFilterPredicate
// inside individual tests — predicate state can leak between tests.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// Freshly inserted by setUp() before every test.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// No-op constructor; the throws clause looks vestigial — TODO confirm it can go.
public CosmosTemplateIT() throws JsonProcessingException {
}
// Lazily builds the shared client/template on first run, ensures the test
// containers exist and are empty, then inserts TEST_PERSON for each test.
@Before
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate for the given config/database by scanning the
// application context for @Persistent entities.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Convenience helper: insert a person keyed by its own partition-key value.
private void insertPerson(Person person) {
    final PartitionKey key = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, key);
}
// Re-inserting the document created in setUp must raise a CosmosAccessException
// wrapping ConflictException (HTTP 409); fail() guards against silent success.
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// A null id without auto-generation must be rejected; the expected exception
// is declared on the annotation rather than asserted inline.
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
// A null id on an auto-generated id column must be filled in on insert.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved =
        cosmosTemplate.insert(GenIdEntity.class.getSimpleName(), toInsert, null);
    assertThat(saved.getId()).isNotNull();
}
// findAll should return exactly the single setup document and record query
// diagnostics with a positive request charge.
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// findById returns the stored document for a known id and null (not an
// exception) for an unknown id; diagnostics are captured in both cases.
@Test
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// findByIds with three ids should return all three persons (order-insensitive
// via containsAll) and record query diagnostics.
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
// Upserting a document that no longer exists (deleted first) must behave as
// an insert and return the new entity.
@Test
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized first name avoids collisions across reruns against shared data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
// upsertAndReturnEntity on an existing document (matching etag) must return
// the updated entity whose new etag matches a subsequent findById.
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
// Plain update via upsertAndReturnEntity with a matching etag; a point write
// records diagnostics but no response statistics.
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
// Applies the multi-operation patch (replace firstName/passports, add a
// hobby, increment age) and verifies every affected field.
// Fixes: (1) the duplicated @Test annotation did not compile (@Test is not
// repeatable); (2) `patchedPerson` was read back from the store but never
// asserted on — the persisted document is now what gets verified, which is
// the stronger check the read-back clearly intended.
@Test
public void testPatchMultiOperations() {
    insertedPerson = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
    Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
// A filter predicate matching nothing must make patch fail with
// PreconditionFailedException; fail() guards against silent success.
// NOTE(review): mutates the shared static `options` instance.
@Test
public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
insertedPerson = cosmosTemplate.patch(insertedPerson, operations,options);
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(insertedPerson.getAge(), patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// Upsert with a bogus etag must hit the optimistic-concurrency check: the
// service reports "precondition not met" and the stored first name stays
// unchanged. Early return in catch plus trailing fail() enforce that the
// exception actually occurred.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
fail();
}
// Insert a second person (count 2), delete the first by id + partition key,
// and confirm only TEST_PERSON_2 remains; point delete leaves diagnostics
// but no response statistics, the follow-up query leaves both.
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
/**
 * Deleting by entity instance removes exactly that document; the originally
 * seeded person must survive.
 */
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
// Point delete: diagnostics captured, no query response statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
/**
 * count(container) reflects inserts: 1 after setUp's seed, 2 after inserting
 * a second person; each count query produces RU statistics.
 */
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * count(query) applies criteria: both a case-sensitive and a case-insensitive
 * equality on firstName must each match exactly one document.
 */
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Point insert: diagnostics only, no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Upper-cased value still matches when case is ignored.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * With page size 1 and two documents, findAll yields a full non-last first
 * page and a one-element last page reached via the continuation pageable.
 */
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Follow the continuation token to the second (and final) page.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * With page size 2 and three documents, the first page holds persons 1 and 2
 * and the last page holds person 3; page contents are order-insensitive.
 */
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Second page via continuation: only the third person remains.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * paginationQuery applies criteria plus paging: one match per query, both for
 * a case-sensitive and a case-insensitive firstName equality.
 */
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
// Single match fits in one page, so this must be the last page.
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Sorting by firstName ascending must order results barney/fred/george, and
 * applying a limit of 1 afterwards must return only the first sorted entry.
 */
@Test
public void testFindWithSortAndLimit() {
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery sortedQuery = new CosmosQuery(byLastName);
    sortedQuery.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(sortedQuery, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Re-run the same query capped at one row; only the first sorted entry remains.
    sortedQuery.withLimit(1);
    final List<Person> limited = TestUtils.toList(cosmosTemplate.find(sortedQuery, Person.class, containerName));
    assertThat(limited.size()).isEqualTo(1);
    assertThat(limited.get(0).getFirstName()).isEqualTo("barney");
}
/**
 * After verifying the full ascending order (barney/fred/george), applying
 * OFFSET 1 LIMIT 1 must return exactly the middle entry ("fred").
 */
@Test
public void testFindWithOffsetAndLimit() {
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery sortedQuery = new CosmosQuery(byLastName);
    sortedQuery.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(sortedQuery, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Skip one row and take one: the second entry in sort order.
    sortedQuery.withOffsetAndLimit(1, 1);
    final List<Person> window = TestUtils.toList(cosmosTemplate.find(sortedQuery, Person.class, containerName));
    assertThat(window.size()).isEqualTo(1);
    assertThat(window.get(0).getFirstName()).isEqualTo("fred");
}
/**
 * A sorted page request (firstName DESC) must return all three documents on
 * one last page, with the two NEW_FIRST_NAME entries ahead of FIRST_NAME.
 */
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
// Descending: the two NEW_FIRST_NAME persons precede the FIRST_NAME one.
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * The ascending firstName sort order must be preserved ACROSS page
 * boundaries: barney, FIRST_NAME, fred on page 1; the two NEW_FIRST_NAME
 * entries on page 2.
 */
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
// The continuation page carries the remaining (highest-sorting) two entries.
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
/**
 * exists(query) must be true for a case-sensitive match on the seeded first
 * name and for an upper-cased value when the criteria ignores case.
 */
@Test
public void testExists() {
    final Criteria exactMatch = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery exactQuery = new CosmosQuery(exactMatch);
    final Boolean exactExists = cosmosTemplate.exists(exactQuery, Person.class, containerName);
    assertThat(exactExists).isTrue();
    // Upper-cased value still matches when case is ignored.
    final Criteria caseInsensitiveMatch = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery caseInsensitiveQuery = new CosmosQuery(caseInsensitiveMatch);
    final Boolean caseInsensitiveExists = cosmosTemplate.exists(caseInsensitiveQuery, Person.class, containerName);
    assertThat(caseInsensitiveExists).isTrue();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * ARRAY_CONTAINS on the hobbies array must match only the seeded person.
 */
@Test
public void testArrayContainsCriteria() {
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery hobbyQuery = new CosmosQuery(hobbyCriteria);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(hobbyQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * CONTAINING on firstName: a case-sensitive "first" matches the three persons
 * whose names literally contain it; ignoring case also matches the
 * "NEW_FIRST_NAME" person.
 */
@Test
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Local renamed from TEST_PERSON_4: UPPER_SNAKE_CASE is reserved for constants.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    // Ignoring case, "NEW_FIRST_NAME" also contains "first".
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
/**
 * CONTAINING on id: each seeded id carries a distinct digit, so matching on
 * that digit must single out exactly one person.
 */
@Test
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final Criteria idContainsOne = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesOne = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsOne), Person.class, containerName));
    assertThat(matchesOne).containsExactly(TEST_PERSON);
    final Criteria idContainsTwo = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesTwo = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsTwo), Person.class, containerName));
    assertThat(matchesTwo).containsExactly(TEST_PERSON_2);
    final Criteria idContainsThree = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesThree = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsThree), Person.class, containerName));
    assertThat(matchesThree).containsExactly(TEST_PERSON_3);
}
/**
 * NOT_CONTAINING on firstName: case-sensitive "li" excludes only the seeded
 * person; case-insensitive "new" excludes everyone except the seeded person.
 */
@Test
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Local renamed from TEST_PERSON_4: UPPER_SNAKE_CASE is reserved for constants.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
/**
 * NOT_CONTAINING on id: excluding the digit unique to one person's id must
 * return the other two persons.
 */
@Test
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final Criteria idWithoutOne = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesWithoutOne = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutOne), Person.class, containerName));
    assertThat(matchesWithoutOne).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
    final Criteria idWithoutTwo = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesWithoutTwo = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutTwo), Person.class, containerName));
    assertThat(matchesWithoutTwo).containsExactly(TEST_PERSON, TEST_PERSON_3);
    final Criteria idWithoutThree = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesWithoutThree = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutThree), Person.class, containerName));
    assertThat(matchesWithoutThree).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
/**
 * IS_NOT_NULL on lastName must match the seeded person (which has one set).
 */
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery lastNameQuery = new CosmosQuery(lastNamePresent);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(lastNameQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * STARTS_WITH with ignore-case must match even when the probe is upper-cased.
 */
@Test
public void testStartsWithCriteriaCaseSensitive() {
    final Criteria prefixCriteria = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery prefixQuery = new CosmosQuery(prefixCriteria);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(prefixQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * IS_EQUAL with ignore-case must match even when the probe is upper-cased.
 */
@Test
public void testIsEqualCriteriaCaseSensitive() {
    // Local renamed from "nameStartsWith" (copy-paste leftover): this is an equality criteria.
    Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
/**
 * STRING_EQUALS with ignore-case must match even when the probe is upper-cased.
 */
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // Local renamed from "nameStartsWith" (copy-paste leftover): this is a string-equality criteria.
    Criteria stringEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(stringEquals), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
/**
 * BETWEEN on age with bounds AGE-1..AGE+1 must match the seeded person.
 */
@Test
public void testBetweenCriteria() {
    final Criteria ageInRange = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final CosmosQuery ageQuery = new CosmosQuery(ageInRange);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(ageQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * A criteria subject may address a nested property via bracket notation
 * ("shippingAddresses[0]['postalCode']") and still match.
 */
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedSubject = "shippingAddresses[0]['postalCode']";
    final Criteria nestedEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedSubject,
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nestedEquals), Person.class,
        containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * runQuery must handle a criteria subject containing spaces inside a quoted
 * map key ("passportIdsByCountry['United States of America']").
 */
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    final String spacedSubject = "passportIdsByCountry['United States of America']";
    final Criteria spacedEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, spacedSubject,
        Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(spacedEquals));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * runQuery with the domain class as both query and return type must match the
 * seeded person via a BETWEEN criteria on age.
 */
@Test
public void testRunQueryWithSimpleReturnType() {
    final Criteria ageInRange = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageInRange));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * sliceQuery applies criteria plus paging and returns a Slice with the single
 * matching person; query statistics carry an RU charge.
 */
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * runSliceQuery takes a pre-generated SqlQuerySpec plus a page request and
 * returns a Slice with the single matching person.
 */
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Creating a container for an autoscale-annotated entity must provision it
 * with the configured autoscale max throughput (verified via readThroughput).
 */
@Test
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
/**
 * A template configured with database-level manual throughput must create the
 * database with exactly that RU/s setting (verified via readThroughput).
 */
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    // Start from a clean slate so the throughput setting is applied at creation.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    // Container creation triggers lazy database creation with the configured throughput.
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against NPE before dereferencing (matches the sibling autoscale test).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
/**
 * The maxDegreeOfParallelism config value must be propagated into the
 * CosmosTemplate (checked via reflection after exercising a query).
 */
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query through the template; its result is irrelevant here.
    maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // JUnit's assertEquals is (expected, actual); the original had them reversed.
    assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
/**
 * The maxBufferedItemCount config value must be propagated into the
 * CosmosTemplate (checked via reflection after exercising a query).
 */
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query through the template; its result is irrelevant here.
    maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // JUnit's assertEquals is (expected, actual); the original had them reversed.
    assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
/**
 * The responseContinuationTokenLimitInKb config value must be propagated into
 * the CosmosTemplate (checked via reflection after exercising a query).
 */
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query through the template; its result is irrelevant here.
    responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // JUnit's assertEquals is (expected, actual); the original had them reversed.
    assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"));
}
/**
 * The enableQueryMetrics flag must be propagated into the CosmosTemplate
 * (checked via reflection after exercising a query).
 *
 * NOTE(review): method name has a typo ("Mertics"); kept as-is so test
 * reports/history stay stable — rename in a dedicated change if desired.
 */
@Test
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query through the template; its result is irrelevant here.
    queryMetricsEnabledCosmosTemplate.count(query, containerName);
    // JUnit's assertEquals is (expected, actual); the original had them reversed.
    assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
/**
 * The client builder's user-agent suffix must carry the spring-data-cosmos
 * marker and the project version. The getter is not public, so it is read
 * via reflection.
 */
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    final Method suffixGetter = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    suffixGetter.setAccessible(true);
    final String suffix = (String) suffixGetter.invoke(cosmosClientBuilder);
    assertThat(suffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(suffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Best-effort delete of the given database. A 404 (database absent) is the
 * only acceptable failure; any other CosmosException status fails the test.
 *
 * @param dbName name of the database to drop
 */
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit's assertEquals is (expected, actual); the original had them reversed.
        assertEquals(404, ex.getStatusCode());
    }
}
} | class CosmosTemplateIT {
// Seed data: TEST_PERSON is inserted once per test in setUp(); persons 2 and 3
// are inserted by individual tests as needed.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fragment of the precondition-failure message emitted on etag mismatch.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by basic patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one call.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
// Creates/cleans the shared containers once for the whole class.
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized in setUp() on first use and shared across tests.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The Person written by setUp(); per-test, not shared.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Declared only to surface Jackson conversion failures at construction time.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
// Build the shared client/template only on the first test; subsequent tests reuse them.
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Ensure the containers exist and are empty so every test starts from a clean slate.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Seed one person and keep the returned entity — tests rely on its server-issued etag.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
/**
 * Builds a {@link CosmosTemplate} for the given database, using the shared async client and
 * the persistent entity types discovered from the application context's classpath.
 */
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosMappingContext context = new CosmosMappingContext();
context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
final CosmosFactory factory = new CosmosFactory(client, dbName);
return new CosmosTemplate(factory, config, converter);
}
// Convenience wrapper: inserts a person using its own partition key field value.
private void insertPerson(Person person) {
final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
cosmosTemplate.insert(person, partitionKey);
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
// TEST_PERSON was already seeded by setUp(); inserting the same id again must conflict.
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
// Diagnostics must be captured even for a failed operation.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// A null id must be rejected because Person's id field is not annotated for auto-generation.
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person personWithoutId = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), personWithoutId, new PartitionKey(personWithoutId.getLastName()));
}
// GenIdEntity's id is annotated for auto-generation, so inserting with a null id must fill it in.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity toInsert = new GenIdEntity(null, "foo");
final GenIdEntity saved = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(), toInsert, null);
assertThat(saved.getId()).isNotNull();
}
@Test
public void testFindAll() {
// setUp() seeded exactly one person, so findAll must return just TEST_PERSON.
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
// Query operations record diagnostics and response statistics with a positive RU charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
// An existing id resolves to the seeded entity.
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// An unknown id returns null rather than throwing; diagnostics are still captured.
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Cross-partition lookup by a list of ids (the three persons span two partitions).
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Result order is not asserted — compare size plus membership instead.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
// Remove the seeded document first so the upsert acts as an insert of a new document.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized first name guards against accidentally matching stale data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
// Point writes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
// Upserting with the current etag must succeed, and the returned entity's new etag must match
// what a subsequent read observes.
@Test
public void testUpdateWithReturnEntity() {
final Person modified = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
modified.set_etag(insertedPerson.get_etag());
final Person returned = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), modified);
final Person reloaded = cosmosTemplate.findById(Person.class.getSimpleName(), returned.getId(), Person.class);
assertEquals(returned, modified);
assertThat(returned.get_etag()).isEqualTo(reloaded.get_etag());
}
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Reuse the etag issued at insert time so the conditional upsert succeeds.
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
// Point writes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
/**
 * Applies the multi-operation patch (set/replace/add/remove/increment) and verifies that every
 * operation took effect on the returned entity.
 *
 * <p>Fix: the annotation was duplicated ({@code @Test @Test}); {@code org.junit.Test} is not
 * {@code @Repeatable}, so the duplicate did not compile.
 */
@Test
public void testPatchMultiOperations() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
// increment("/age", PATCH_AGE_INCREMENT) on the seeded AGE.
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// remove("/shippingAddresses/1") leaves exactly one address.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
/**
 * Patch with a filter predicate matching the stored document must succeed.
 *
 * <p>Fix: uses a method-local {@code CosmosPatchItemRequestOptions} instead of mutating the
 * shared static {@code options} field, which made this test order-dependent with
 * {@code testPatchPreConditionFail}.
 */
@Test
public void testPatchPreConditionSuccess() {
CosmosPatchItemRequestOptions localOptions = new CosmosPatchItemRequestOptions();
// Predicate matches the seeded person's last name, so the patch is applied.
localOptions.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, localOptions);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
/**
 * Patch with a filter predicate that matches no stored document must fail with a
 * precondition-failed error.
 *
 * <p>Fix: uses a method-local {@code CosmosPatchItemRequestOptions} instead of mutating the
 * shared static {@code options} field, which made this test order-dependent with
 * {@code testPatchPreConditionSuccess}.
 */
@Test
public void testPatchPreConditionFail() {
CosmosPatchItemRequestOptions localOptions = new CosmosPatchItemRequestOptions();
// Predicate matches no document, so the service must reject the patch.
localOptions.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
try {
cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, localOptions);
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
// Diagnostics must be captured even for a failed operation.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// A stale/incorrect etag must make the conditional upsert fail with a precondition error.
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The stored document must be untouched by the rejected upsert.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching this point means the upsert silently succeeded with a wrong etag — a failure.
fail();
}
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Deletes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Only the second person remains after deleting the seeded one.
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
// Local variable intentionally shadows the field; deleteEntity needs the server-issued etag
// carried by the returned entity.
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
// Deletes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// The originally seeded person is the one that remains.
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
// setUp() seeded exactly one document.
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// The count must reflect the additional insert.
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Inserts record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive count: matches only the exact first name.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Case-insensitive count: an upper-cased needle still matches.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
// Two documents total with page size 1 — forces a two-page result.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
// First page is full and carries a continuation to the next page.
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Following the continuation yields the final page with the remaining document.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
// Three documents total with page size 2 — pages of sizes 2 and 1.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
// Page order within a page is not asserted — compare membership.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive paged query: only the seeded FIRST_NAME person matches.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
// Case-insensitive variant: an upper-cased needle yields the same single match.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Ascending sort on firstName must order barney < fred < george; withLimit(1) must then cap the
// result at the first sorted entry.
@Test
public void testFindWithSortAndLimit() {
final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(fred);
insertPerson(barney);
insertPerson(george);
final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(byLastName);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(sorted.size()).isEqualTo(3);
assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
query.withLimit(1);
final List<Person> limited = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(limited.size()).isEqualTo(1);
assertThat(limited.get(0).getFirstName()).isEqualTo("barney");
}
// withOffsetAndLimit(1, 1) must skip the first sorted entry and return only the second ("fred").
@Test
public void testFindWithOffsetAndLimit() {
final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(fred);
insertPerson(barney);
insertPerson(george);
final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(byLastName);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(sorted.size()).isEqualTo(3);
assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
query.withOffsetAndLimit(1, 1);
final List<Person> offsetResult = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(offsetResult.size()).isEqualTo(1);
assertThat(offsetResult.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Descending sort puts the two NEW_FIRST_NAME persons before the seeded FIRST_NAME person.
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
// Five documents total, page size 3 — the ascending sort must hold across page boundaries.
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
// First page: barney, FIRST_NAME, fred — ascending.
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
// Second page: the two remaining NEW_FIRST_NAME persons.
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
// Case-sensitive existence check against the seeded first name.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
// Case-insensitive variant: the upper-cased needle still matches.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// ARRAY_CONTAINS against the hobbies array must match only the seeded person.
@Test
public void testArrayContainsCriteria() {
final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(hobbyCriteria), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
// CONTAINING is case-sensitive with IgnoreCaseType.NEVER (excludes the all-caps name) and
// case-insensitive with ALWAYS (includes it).
@Test
public void testContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final Person upperCasePerson = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(upperCasePerson, new PartitionKey(personInfo.getPartitionKeyFieldValue(upperCasePerson)));
final Criteria caseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
final List<Person> caseSensitiveMatches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
final Criteria ignoreCase = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
final List<Person> ignoreCaseMatches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(ignoreCase), Person.class, containerName));
assertThat(ignoreCaseMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, upperCasePerson);
}
// CONTAINING on the id field: each digit substring ("1", "2", "3") matches exactly one person.
@Test
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final Criteria idContainsOne = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
final List<Person> matchesOne = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idContainsOne), Person.class, containerName));
assertThat(matchesOne).containsExactly(TEST_PERSON);
final Criteria idContainsTwo = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
final List<Person> matchesTwo = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idContainsTwo), Person.class, containerName));
assertThat(matchesTwo).containsExactly(TEST_PERSON_2);
final Criteria idContainsThree = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
final List<Person> matchesThree = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idContainsThree), Person.class, containerName));
assertThat(matchesThree).containsExactly(TEST_PERSON_3);
}
// NOT_CONTAINING honours case sensitivity: NEVER keeps the all-caps name, ALWAYS filters it too.
@Test
public void testNotContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final Person upperCasePerson = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(upperCasePerson, new PartitionKey(personInfo.getPartitionKeyFieldValue(upperCasePerson)));
final Criteria withoutLiCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
final List<Person> withoutLi = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(withoutLiCaseSensitive), Person.class, containerName));
assertThat(withoutLi).containsExactly(TEST_PERSON_2, TEST_PERSON_3, upperCasePerson);
final Criteria withoutNewIgnoreCase = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
final List<Person> withoutNew = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(withoutNewIgnoreCase), Person.class, containerName));
assertThat(withoutNew).containsExactly(TEST_PERSON);
}
// NOT_CONTAINING on the id field: excluding each digit drops exactly the matching person.
@Test
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final Criteria idWithoutOne = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
final List<Person> withoutOne = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idWithoutOne), Person.class, containerName));
assertThat(withoutOne).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
final Criteria idWithoutTwo = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
final List<Person> withoutTwo = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idWithoutTwo), Person.class, containerName));
assertThat(withoutTwo).containsExactly(TEST_PERSON, TEST_PERSON_3);
final Criteria idWithoutThree = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
final List<Person> withoutThree = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(idWithoutThree), Person.class, containerName));
assertThat(withoutThree).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
// IS_NOT_NULL on lastName matches the only seeded person.
@Test
public void testIsNotNullCriteriaCaseSensitive() {
final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(lastNamePresent), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
// STARTS_WITH with ALWAYS must match even when the needle's case differs from the stored value.
@Test
public void testStartsWithCriteriaCaseSensitive() {
final Criteria startsWithIgnoreCase = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(startsWithIgnoreCase), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
// IS_EQUAL with ALWAYS must match even when the needle's case differs from the stored value.
@Test
public void testIsEqualCriteriaCaseSensitive() {
final Criteria equalsIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(equalsIgnoreCase), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // STRING_EQUALS with ALWAYS ignore-case: the upper-cased first name must still match.
    // (Local renamed from the copy-pasted "nameStartsWith" to reflect the operator under test.)
    final Criteria nameStringEquals = Criteria.getInstance(
        CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> people = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(nameStringEquals), Person.class, containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
    // AGE lies inside [AGE - 1, AGE + 1], so TEST_PERSON is expected back.
    Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    // The criteria subject may address a nested property via index/bracket notation.
    String postalCode = ADDRESSES.get(0).getPostalCode();
    String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
    Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
        Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    // Map keys containing spaces must survive query-spec generation.
    String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    String subjectWithSpaces = "passportIdsByCountry['United States of America']";
    Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
        Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
    List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
    // runQuery with domain type == return type; generated spec comes from a BETWEEN criteria.
    Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
    List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testSliceQuery() {
    // Add a second person; the filter below matches only FIRST_NAME, so one hit is expected.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // After a point write: diagnostics recorded, but no query response statistics yet.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
    final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
    assertThat(slice.getContent().size()).isEqualTo(1);
    // The slice query must surface diagnostics and a positive request charge.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testRunSliceQuery() {
    // Same scenario as testSliceQuery, but driving a raw SqlQuerySpec through runSliceQuery.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // After a point write: diagnostics recorded, but no query response statistics yet.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
    final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
    assertThat(slice.getContent().size()).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void createWithAutoscale() throws ClassNotFoundException {
    // Container creation from entity metadata should provision the autoscale
    // max throughput configured for AutoScaleSample.
    final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
        new CosmosEntityInformation<>(AutoScaleSample.class);
    CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
    assertNotNull(containerProperties);
    // Read the provisioned throughput back through the raw async client to verify it.
    ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
        .getContainer(autoScaleSampleInfo.getContainerName())
        .readThroughput()
        .block();
    assertNotNull(throughput);
    assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
        throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    // Provision a dedicated database with fixed manual throughput and verify it took effect.
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    // Creating a container triggers database creation with the configured throughput.
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against an NPE hiding the real failure (mirrors createWithAutoscale's null check).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    // Build a template configured with maxDegreeOfParallelism = 20.
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count() call only exercises the query path; its result is intentionally unused.
    final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // The configured value is asserted via reflection on the template's private field.
    assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    // Build a template configured with maxBufferedItemCount = 500.
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count() call only exercises the query path; its result is intentionally unused.
    final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // The configured value is asserted via reflection on the template's private field.
    assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    // Build a template configured with responseContinuationTokenLimitInKb = 2000.
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count() call only exercises the query path; its result is intentionally unused.
    final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // The configured value is asserted via reflection on the template's private field.
    assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"), 2000);
}
@Test
// NOTE(review): "Mertics" is a typo for "Metrics" in the method name; left as-is
// because renaming would change the test's public identifier.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    // Build a template configured with query metrics enabled.
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count() call only exercises the query path; the flag is asserted via reflection.
    final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
    assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // The builder's user-agent suffix is private; reach it via reflection and verify it
    // carries both the spring-data-cosmos marker and the project version.
    Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    getUserAgentSuffix.setAccessible(true);
    String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
    assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
private void deleteDatabaseIfExists(String dbName) {
    // Best-effort cleanup: a 404 means the database was already absent, which is fine;
    // any other status is a genuine failure.
    try {
        CosmosAsyncDatabase database = client.getDatabase(dbName);
        database.delete().block();
    } catch (CosmosException ex) {
        assertEquals(ex.getStatusCode(), 404);
    }
}
} |
This check will always succeed because it compares the value with itself ("a = a"). Let's change the second parameter to the expected "Age" constant. | public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), patchedPerson.getAge());
} | assertEquals(patchedPerson.getAge(), patchedPerson.getAge()); | public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
} | class CosmosTemplateIT {
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fixture people: identical age/addresses/passports, distinct ids and names.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fragment of the server-side precondition-failure message asserted on below.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the simple patch tests: replaces /age.
private static final CosmosPatchOperations operations = CosmosPatchOperations
    .create()
    .replace("/age", PATCH_AGE_1);
// Multi-operation patch: set/replace/add/remove/increment in a single request.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
    .create()
    .set("/firstName", PATCH_FIRST_NAME)
    .replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
    .add("/hobbies/2", PATCH_HOBBY1)
    .remove("/shippingAddresses/1")
    .increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): static and mutated via setFilterPredicate in several tests, so a
// predicate set by one test leaks into later tests — consider per-test instances.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// Freshly inserted in setUp(); most tests operate on this document.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Empty body; presumably declared only to surface JsonProcessingException from
// instance initialization — TODO confirm.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
    // Lazily build the shared client/template once for the whole test class.
    if (cosmosTemplate == null) {
        client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
        personInfo = new CosmosEntityInformation<>(Person.class);
        containerName = personInfo.getContainerName();
        cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
    }
    // Start every test from empty containers plus exactly one seeded person.
    collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
        GenIdEntity.class, AuditableEntity.class);
    insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
        new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate against the given database, scanning the application
// context for @Persistent entities to populate the mapping context.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
    final CosmosMappingContext mappingContext = new CosmosMappingContext();
    mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
    return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Convenience wrapper: derives the partition key from entity metadata before inserting.
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
    // setUp already stored TEST_PERSON, so a second insert with the same id must conflict.
    try {
        cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
        fail();
    } catch (CosmosAccessException ex) {
        assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
        assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    }
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    // Person's id is not auto-generated, so inserting with a null id must be rejected.
    final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // GenIdEntity's id is auto-generated, so inserting with a null id must fill it in.
    final GenIdEntity entity = new GenIdEntity(null, "foo");
    final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
        entity, null);
    assertThat(insertedEntity.getId()).isNotNull();
}
@Test
public void testFindAll() {
    // Only the person seeded in setUp should be present.
    final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
        Person.class));
    assertThat(result.size()).isEqualTo(1);
    assertThat(result.get(0)).isEqualTo(TEST_PERSON);
    // Query paths must record diagnostics and a positive request charge.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
    // An existing id returns the entity and records diagnostics plus a request charge.
    final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
        TEST_PERSON.getId(), Person.class);
    assertEquals(result, TEST_PERSON);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    // A missing id yields null rather than throwing, but still records diagnostics.
    final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
        NOT_EXIST_ID, Person.class);
    assertThat(nullResult).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
    // Insert two more people so all three fixture ids exist.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
    final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    // Compared order-insensitively: size match plus containsAll.
    final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    assertThat(result.size()).isEqualTo(expected.size());
    assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
    // Remove the seeded document first so the upsert takes the insert path.
    cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
    final String firstName = NEW_FIRST_NAME
        + "_"
        + UUID.randomUUID();
    // NOTE(review): NEW_FIRST_NAME is passed in the lastName position here — looks
    // accidental; confirm against the Person constructor parameter order.
    final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
        AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
    // Point writes record diagnostics but no query response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
    final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    // Reuse the current etag so the optimistic-concurrency check passes.
    updated.set_etag(insertedPerson.get_etag());
    final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
    final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
        updatedPerson.getId(), Person.class);
    // The returned entity must match and carry the same etag as the stored document.
    assertEquals(updatedPerson, updated);
    assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
public void testUpdate() {
    final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    // Reuse the current etag so the optimistic-concurrency check passes.
    updated.set_etag(insertedPerson.get_etag());
    final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
    // Point writes record diagnostics but no query response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertEquals(person, updated);
}
@Test
public void testPatch() {
    // The shared single-operation patch replaces /age with PATCH_AGE_1.
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
    assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
public void testPatchMultiOperations() {
    // Apply set/replace/add/remove/increment in one request and verify each effect.
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    // One of the two shipping addresses was removed by the patch.
    assertEquals(patchedPerson.getShippingAddresses().size(), 1);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
public void testPatchPreConditionFail() {
    // Fixed: the annotation was duplicated (@Test twice), which does not compile —
    // JUnit's @Test is not a repeatable annotation.
    try {
        // The filter predicate matches no document, so the conditional patch must be rejected.
        options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
        // The tautological assertEquals(x.getAge(), x.getAge()) that followed this call
        // was removed: the patch is expected to throw, and fail() already guards the
        // unexpected-success path.
        cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
        fail();
    } catch (CosmosAccessException ex) {
        assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
        assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    }
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
    final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    // A bogus etag must make the upsert fail the server-side precondition check.
    updated.set_etag(WRONG_ETAG);
    try {
        cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
    } catch (CosmosAccessException e) {
        assertThat(e.getCosmosException()).isNotNull();
        final Throwable cosmosClientException = e.getCosmosException();
        assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
        assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
        assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
        // The stored document must be untouched by the rejected upsert.
        final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
        assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
        return;
    }
    // Reaching here means the upsert unexpectedly succeeded.
    fail();
}
@Test
public void testDeleteById() {
    // Two documents exist after this insert; deleting one must leave the other.
    cosmosTemplate.insert(TEST_PERSON_2, null);
    assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
    cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
    // The delete records diagnostics but no query response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    assertThat(result.size()).isEqualTo(1);
    assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
    // Renamed the local (was "insertedPerson") — it shadowed the instance field of the
    // same name that setUp() populates, inviting confusion between the two documents.
    Person personToDelete = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
    assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
    cosmosTemplate.deleteEntity(Person.class.getSimpleName(), personToDelete);
    // The delete records diagnostics but no query response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    assertThat(result.size()).isEqualTo(1);
    assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
    // One document seeded by setUp; count must track the subsequent insert.
    final long prevCount = cosmosTemplate.count(containerName);
    assertThat(prevCount).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    final long newCount = cosmosTemplate.count(containerName);
    assertThat(newCount).isEqualTo(2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // Insert records diagnostics but no query response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    // Case-sensitive count on TEST_PERSON_2's first name.
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    final long count = cosmosTemplate.count(query, containerName);
    assertThat(count).isEqualTo(1);
    // Same count with ALWAYS ignore-case and an upper-cased value must match too.
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
    final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
    assertThat(countIgnoreCase).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
    // Two documents with page size PAGE_SIZE_1: first page full, second page holds the rest.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
    final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
    assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
    PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    // Continue with the continuation token carried by page1's pageable.
    final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
        containerName);
    assertThat(page2.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
    // Three documents with page size PAGE_SIZE_2: two on page one, one on page two.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
    final List<Person> resultPage1 = TestUtils.toList(page1);
    final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
    assertThat(resultPage1.size()).isEqualTo(expected.size());
    assertThat(resultPage1).containsAll(expected);
    PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    // The trailing page holds the remaining person and is flagged as last.
    final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
    final List<Person> resultPage2 = TestUtils.toList(page2);
    final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
    assertThat(resultPage2.size()).isEqualTo(expected2.size());
    assertThat(resultPage2).containsAll(expected2);
    PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
    // Two people exist, but the filter matches only FIRST_NAME, so the page has one hit.
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
    final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
    assertThat(page.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
    // The same query with ALWAYS ignore-case and an upper-cased value must match too.
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
    final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
        containerName);
    assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
    // Three extra people sharing NEW_LAST_NAME, with first names chosen to test ordering.
    final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(testPerson4);
    insertPerson(testPerson5);
    insertPerson(testPerson6);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(criteria);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    // Ascending sort on firstName: barney < fred < george.
    final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(result.size()).isEqualTo(3);
    assertThat(result.get(0).getFirstName()).isEqualTo("barney");
    assertThat(result.get(1).getFirstName()).isEqualTo("fred");
    assertThat(result.get(2).getFirstName()).isEqualTo("george");
    // Limiting to 1 keeps only the first element of the sorted result.
    query.withLimit(1);
    final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(resultWithLimit.size()).isEqualTo(1);
    assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
    // Same fixture as testFindWithSortAndLimit, but exercising OFFSET/LIMIT.
    final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(testPerson4);
    insertPerson(testPerson5);
    insertPerson(testPerson6);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(criteria);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(result.size()).isEqualTo(3);
    assertThat(result.get(0).getFirstName()).isEqualTo("barney");
    assertThat(result.get(1).getFirstName()).isEqualTo("fred");
    assertThat(result.get(2).getFirstName()).isEqualTo("george");
    // Offset 1 + limit 1 over the sorted result selects the middle element ("fred").
    query.withOffsetAndLimit(1, 1);
    final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(resultWithLimit.size()).isEqualTo(1);
    assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
// Verifies a single sorted page covering all documents, plus diagnostics side effects.
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// After the inserts: diagnostics recorded, but no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
// Descending order by firstName: the two NEW_FIRST_NAME documents precede FIRST_NAME.
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
// The paged query must populate response statistics with a positive request charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Verifies that sort order is preserved across a page boundary (5 documents, page size 3).
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
// First page: full (3 of 5), ascending by firstName — "barney", FIRST_NAME, "fred".
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
// Second page is fetched via the continuation token carried in nextPageable().
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
// Exercises exists() with both a case-sensitive and a case-insensitive equality criteria.
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
// The upper-cased name only matches because IgnoreCaseType.ALWAYS is requested.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
// exists() is a query under the hood, so it must record statistics and a request charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
    // ARRAY_CONTAINS should match the seeded person, whose hobby list includes HOBBY1.
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(hobbyCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // This person's first name is the literal string "NEW_FIRST_NAME"; it only contains
    // "first" when case is ignored.
    final Person person4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(person4, new PartitionKey(personInfo.getPartitionKeyFieldValue(person4)));
    // Case-sensitive CONTAINS: only the three lower-case "first" names match.
    final Criteria caseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    final List<Person> caseSensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
    assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    // Case-insensitive CONTAINS additionally matches the upper-case person.
    final Criteria ignoreCase = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    final List<Person> ignoreCaseMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ignoreCase), Person.class, containerName));
    assertThat(ignoreCaseMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, person4);
}
@Test
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Each person's id contains a distinct digit, so CONTAINS on "id" isolates one document each.
    final Criteria idContains1 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContains1), Person.class, containerName)))
        .containsExactly(TEST_PERSON);
    final Criteria idContains2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContains2), Person.class, containerName)))
        .containsExactly(TEST_PERSON_2);
    final Criteria idContains3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContains3), Person.class, containerName)))
        .containsExactly(TEST_PERSON_3);
}
@Test
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final Person person4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(person4, new PartitionKey(personInfo.getPartitionKeyFieldValue(person4)));
    // Case-sensitive NOT_CONTAINS "li": everything except the original seeded person matches.
    final Criteria caseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    final List<Person> caseSensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
    assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON_2, TEST_PERSON_3, person4);
    // Case-insensitive NOT_CONTAINS "new": only the original person's name lacks it.
    final Criteria ignoreCase = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    final List<Person> ignoreCaseMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ignoreCase), Person.class, containerName));
    assertThat(ignoreCaseMatches).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // NOT_CONTAINS on a distinct id digit excludes exactly the one matching document.
    final Criteria idWithout1 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithout1), Person.class, containerName)))
        .containsExactly(TEST_PERSON_2, TEST_PERSON_3);
    final Criteria idWithout2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithout2), Person.class, containerName)))
        .containsExactly(TEST_PERSON, TEST_PERSON_3);
    final Criteria idWithout3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    assertThat(TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithout3), Person.class, containerName)))
        .containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    // IS_NOT_NULL takes no comparison values; the ignore-case flag has no effect on the match.
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(lastNamePresent), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
    // The upper-cased needle only matches because IgnoreCaseType.ALWAYS requests a
    // case-insensitive comparison.
    final Criteria prefixCriteria = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(prefixCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testIsEqualCriteriaCaseSensitive() {
    // IS_EQUAL with IgnoreCaseType.ALWAYS: the upper-cased name still matches the seeded person.
    final Criteria equalityCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(equalityCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // STRING_EQUALS with IgnoreCaseType.ALWAYS: the upper-cased name still matches.
    final Criteria stringEqualsCriteria = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(stringEqualsCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
    // The range [AGE - 1, AGE + 1] brackets the seeded person's age.
    final Criteria ageRange = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ageRange), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    // The criteria subject addresses a nested array element using bracket notation.
    final String postalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedSubject = "shippingAddresses[0]['postalCode']";
    final Criteria nestedCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedSubject,
        Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(nestedCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    // Map keys containing spaces must survive query-spec generation intact.
    final String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    final String spacedSubject = "passportIdsByCountry['United States of America']";
    final Criteria spacedCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL, spacedSubject,
        Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(spacedCriteria));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
    // runQuery with the same domain type as the return type round-trips the document.
    final Criteria ageRange = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageRange));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(querySpec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies sliceQuery returns a single-element slice and records query statistics.
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// After the insert: diagnostics recorded, but no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
// Only the originally seeded person has FIRST_NAME, so the slice holds one element.
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same as testSliceQuery, but drives a pre-generated SqlQuerySpec through runSliceQuery.
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// After the insert: diagnostics recorded, but no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Creates a container for an entity configured for autoscale and verifies the provisioned
// autoscale max throughput matches the configured value.
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
// Read the throughput back from the service rather than trusting the create response.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Creates a dedicated database with manual throughput and verifies the provisioned RUs.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    // Use a dedicated database name so configured throughput cannot collide with other tests.
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    deleteDatabaseIfExists(configuredThroughputDbName);
    final Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    // Creating the container triggers creation of the database with the configured throughput.
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against an NPE masking the real failure if the reactive call yields nothing.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Verifies that a CosmosConfig-supplied maxDegreeOfParallelism is carried into the template.
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the setting is exercised; the count value itself is not under test
    // (the previous unused local has been removed).
    maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first.
    assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
@Test
// Verifies that a CosmosConfig-supplied maxBufferedItemCount is carried into the template.
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the setting is exercised; the count value itself is not under test
    // (the previous unused local has been removed).
    maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first.
    assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
@Test
// Verifies that a CosmosConfig-supplied responseContinuationTokenLimitInKb is carried
// into the template.
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the setting is exercised; the count value itself is not under test
    // (the previous unused local has been removed).
    responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first.
    assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"));
}
@Test
// Verifies that a CosmosConfig-supplied queryMetricsEnabled flag is carried into the template.
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept to avoid churn in
// test reports and CI history.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the flag is exercised; the count value itself is not under test
    // (the previous unused local has been removed).
    queryMetricsEnabledCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first.
    assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // The suffix getter is not public on the builder, so read it via reflection.
    final Method suffixGetter = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    suffixGetter.setAccessible(true);
    final String suffix = (String) suffixGetter.invoke(cosmosClientBuilder);
    // The suffix must carry both the library marker and the current project version.
    assertThat(suffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(suffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup helper: deletes the named database, tolerating only NOT_FOUND (404).
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit convention is expected-first; the original call had the arguments swapped,
        // which produces a misleading failure message. Any status other than 404 is a real error.
        assertEquals(404, ex.getStatusCode());
    }
}
} | class CosmosTemplateIT {
// Three fixture people sharing the same partition-key field values defined in TestConstants.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the service's HTTP 412 (precondition failed) message.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by testPatch / testPatchPreConditionFail.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// NOTE(review): instance field (not static final) — a fresh copy exists per test instance.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared static mutable options — testPatchPreConditionFail mutates its
// filter predicate; verify no other test relies on a clean instance.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once by setUp() and shared across all tests in the class.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The person inserted fresh before each test; carries the current _etag.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Declared throws matches the checked exception of field initializers; the body is empty.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
// Lazily builds the shared client/template on the first test, then empties the managed
// containers and seeds one person before every test.
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Keep the returned entity: its _etag is needed by the optimistic-locking tests.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate against the given database using the shared async client and
// an entity-scanned mapping context.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Inserts a person, deriving the partition key from the entity metadata so callers
// need not supply it.
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
@Test
// Re-inserting the already-seeded person must surface the service 409 as a ConflictException.
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
// Diagnostics must be recorded even for the failed request.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    // Person's id is not auto-generated, so inserting with a null id must be rejected.
    final Person personWithoutId = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(Person.class.getSimpleName(), personWithoutId, new PartitionKey(personWithoutId.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // GenIdEntity's id is auto-generated, so a null id must be filled in on insert.
    final GenIdEntity entityWithoutId = new GenIdEntity(null, "foo");
    final GenIdEntity persisted = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
        entityWithoutId, null);
    assertThat(persisted.getId()).isNotNull();
}
@Test
// findAll over the container returns the single seeded person and records statistics.
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findById returns the entity for an existing id and null for a missing one.
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// A missing id yields null rather than an exception.
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findByIds resolves all three seeded documents in a single query.
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Order is not guaranteed, so compare as an unordered set of the expected people.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
// upsert of a non-existent id behaves as an insert and returns the stored entity.
public void testUpsertNewDocument() {
// Remove the seeded person first so the upsert targets a missing document.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized first name guards against accidentally matching stale data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Upserts are point operations: no query statistics are expected.
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
// upsertAndReturnEntity on an existing document returns the updated entity with the
// new _etag that a subsequent read also observes.
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Carry the current _etag so the conditional update succeeds.
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
// Plain update path: upsert with the current _etag replaces the document in place.
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Upserts are point operations: no query statistics are expected.
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
// A single replace("/age") patch must come back with the patched age.
public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
// Applies set/replace/add/remove/increment in one patch and checks each effect.
public void testPatchMultiOperations() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// One of the two original shipping addresses was removed by the patch.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
// A filter predicate that matches no document must make the patch fail with HTTP 412.
// Fix: the method previously carried a duplicate @Test annotation, which is a compile
// error in Java (non-@Repeatable annotations may appear at most once).
public void testPatchPreConditionFail() {
    try {
        // NOTE(review): mutates the shared static `options` instance — later tests see
        // this filter predicate unless they overwrite it.
        options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
        Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
        assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
        fail();
    } catch (CosmosAccessException ex) {
        assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
        // Diagnostics must be recorded even for the failed request.
        assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    }
}
@Test
// Upserting with a stale/wrong _etag must fail with a precondition error and leave the
// stored document unmodified.
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
// Service message for HTTP 412 contains "is not met".
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// Confirm the document was not changed by the rejected upsert.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert unexpectedly succeeded.
fail();
}
@Test
// deleteById removes only the targeted document, leaving the other intact.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Delete is a point operation: diagnostics recorded, no query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
// deleteEntity removes the given entity instance, leaving the original seeded person intact.
public void testDeleteByEntity() {
// Keep the returned entity: it carries the server-assigned _etag used by the delete.
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
// Delete is a point operation: diagnostics recorded, no query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Person TEST_PERSON_4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_4, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, TEST_PERSON_4);
}
@Test
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
@Test
public void testNotContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Person TEST_PERSON_4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_4, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, TEST_PERSON_4);
Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
Criteria hasLastName = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasLastName), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testIsEqualCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
assertEquals(ex.getStatusCode(), 404);
}
}
} |
Same as above. | public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
} | assertEquals(patchedPerson.getAge(), patchedPerson.getAge()); | public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
} | class CosmosTemplateIT {
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
    // Lazily build the shared async client and template once for the whole class;
    // containers are (re)created and emptied before every test for isolation.
    if (cosmosTemplate == null) {
        client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
        personInfo = new CosmosEntityInformation<>(Person.class);
        containerName = personInfo.getContainerName();
        cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
    }
    collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
        GenIdEntity.class, AuditableEntity.class);
    // Every test starts with exactly one document (TEST_PERSON) already stored;
    // the returned entity carries the server-assigned _etag used by the etag tests.
    insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
        new PartitionKey(TEST_PERSON.getLastName()));
}
/**
 * Builds a CosmosTemplate over the shared async client for the given database,
 * seeding the mapping context with every @Persistent entity on the classpath.
 */
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    final EntityScanner scanner = new EntityScanner(this.applicationContext);
    context.setInitialEntitySet(scanner.scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new CosmosTemplate(factory, config, converter);
}
/** Inserts the person using its entity-derived partition key. */
private void insertPerson(Person person) {
    final PartitionKey partitionKey =
        new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
    // TEST_PERSON was already inserted by setUp(); inserting the same id again
    // must surface the service's 409 Conflict as a ConflictException.
    try {
        cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
            new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
        fail();
    } catch (CosmosAccessException ex) {
        assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
        assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    }
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    // Person's id is not auto-generated, so a null id must be rejected on insert.
    final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // An entity whose id field is auto-generated should come back with one assigned.
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved =
        cosmosTemplate.insert(GenIdEntity.class.getSimpleName(), toInsert, null);
    assertThat(saved.getId()).isNotNull();
}
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
public void testPatch() {
    // 'operations' replaces /age with PATCH_AGE_1; the returned entity must reflect it.
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
    assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
public void testPatchMultiOperations() {
    // Applies set/replace/add/remove/increment in one patch and verifies each effect.
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT)); // increment /age
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES); // add /hobbies/2
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME); // set /firstName
    assertEquals(patchedPerson.getShippingAddresses().size(), 1); // remove /shippingAddresses/1
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY); // replace /passportIdsByCountry
}
@Test
public void testPatchPreConditionSuccess() {
    // Filter predicate matches the stored entity's lastName, so the patch must be applied.
    options.setFilterPredicate("FROM person p WHERE p.lastName = '" + LAST_NAME + "'");
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
    // BUG FIX: the assertion previously compared patchedPerson.getAge() to itself,
    // which is always true and verifies nothing. 'operations' replaces /age with
    // PATCH_AGE_1, so assert against that expected value (same as testPatch).
    assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
    // BUG FIX: the method was annotated with @Test twice; JUnit 4's @Test is not
    // @Repeatable, so a duplicate annotation does not compile. Keep a single one.
    // Scenario: an upsert carrying a stale/incorrect _etag must be rejected with a
    // precondition-failed error and must leave the stored document unmodified.
    final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    updated.set_etag(WRONG_ETAG);
    try {
        cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
    } catch (CosmosAccessException e) {
        assertThat(e.getCosmosException()).isNotNull();
        final Throwable cosmosClientException = e.getCosmosException();
        assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
        assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
        assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
        // The rejected upsert must not have changed the stored entity.
        final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
        assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
        return;
    }
    fail();
}
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
    // Case-sensitive equality on firstName must match the seeded TEST_PERSON.
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
    assertThat(exists).isTrue();
    // An upper-cased value still matches when IgnoreCaseType.ALWAYS is used.
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
    final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
    assertThat(existsIgnoreCase).isTrue();
    // The queries should have produced diagnostics and a positive request charge.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Person TEST_PERSON_4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_4, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, TEST_PERSON_4);
}
@Test
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
@Test
public void testNotContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Person TEST_PERSON_4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_4, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, TEST_PERSON_4);
Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
Criteria hasLastName = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasLastName), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testIsEqualCriteriaCaseSensitive() {
// IS_EQUAL with IgnoreCaseType.ALWAYS: the probe value is upper-cased, so a match
// proves equality is evaluated case-insensitively.
// (Local renamed from the copy-pasted "nameStartsWith" to match the criteria used.)
final Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
// STRING_EQUALS with IgnoreCaseType.ALWAYS: upper-cased probe value must still match.
// (Local renamed from the copy-pasted "nameStartsWith" to match the criteria used.)
final Criteria nameStringEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
final List<Person> matches = TestUtils.toList(
cosmosTemplate.find(new CosmosQuery(nameStringEquals), Person.class, containerName));
assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testBetweenCriteria() {
// BETWEEN takes a two-element value list [lower, upper]; AGE lies strictly inside.
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
// The criteria subject uses bracket syntax to address a nested property
// (first shipping address's postalCode) rather than a top-level field.
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
// Map access with a key containing spaces must survive query-spec generation.
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
// Generate the raw SqlQuerySpec and run it via runQuery (not find).
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
// Exercises runQuery with identical domain/return types on a generated spec.
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// After an insert: diagnostics populated, but no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
// Page size 2, but only one document matches FIRST_NAME.
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
// Query execution should have recorded diagnostics and a positive request charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// After an insert: diagnostics populated, but no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
// Same as testSliceQuery but through runSliceQuery with a pre-generated SqlQuerySpec.
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void createWithAutoscale() throws ClassNotFoundException {
// Creates a container for AutoScaleSample and verifies the provisioned
// autoscale max throughput matches the configured TestConstants value.
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
// Read throughput back directly through the async client, not the template.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
// Verifies that enableDatabaseThroughput provisions a database with the
// configured manual request units.
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
// Start from a clean slate so the throughput read below reflects this test's config.
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
// Container creation triggers database creation with the configured throughput.
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
// Guard against a null blocking result before dereferencing, consistent with
// createWithAutoscale (the original would NPE here instead of failing cleanly).
assertNotNull(response);
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Result intentionally unused: the count() call presumably exercises the query
// path with the configured option — TODO confirm it is needed for the assertion.
final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
// Only verifies the private field was wired through, via reflection.
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Result intentionally unused; the query run exercises the configured option.
final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
// Only verifies the private field was wired through, via reflection.
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Result intentionally unused; the query run exercises the configured option.
final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
// Only verifies the private field was wired through, via reflection.
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept as-is to
// avoid renaming a discoverable test method in a documentation-only pass.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Result intentionally unused; the query run exercises the configured option.
final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
// Only verifies the private flag was wired through, via reflection.
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
// Reaches into CosmosClientBuilder via reflection (the getter is not public)
// to confirm the spring-data suffix and project version are appended.
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Best-effort removal of the named database. A CosmosException is tolerated only
 * when it carries a 404 status (database already absent); any other status fails
 * the enclosing test via the assertion.
 */
private void deleteDatabaseIfExists(String dbName) {
final CosmosAsyncDatabase target = client.getDatabase(dbName);
try {
target.delete().block();
} catch (CosmosException ex) {
// "Not found" is the only acceptable failure mode here.
assertEquals(ex.getStatusCode(), 404);
}
}
} | class CosmosTemplateIT {
// Three fixture documents sharing the same age/addresses/passport data;
// TEST_PERSON_2 and TEST_PERSON_3 share NEW_FIRST_NAME/NEW_LAST_NAME.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected inside the precondition-failure message from the service.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the simple patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch covering set/replace/add/remove/increment in one call.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared mutable options instance; testPatchPreConditionSuccess sets its filter predicate.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once in setUp and shared across tests.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// Entity returned by the setUp insert; carries the server-assigned etag.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Empty constructor; declares JsonProcessingException — presumably to cover
// checked exceptions from Jackson-based initialization (TODO confirm it is still needed).
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
// Initialize the shared static client/template once for the whole test class.
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Containers are emptied before each test so each starts from a known state.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Seed exactly one document (TEST_PERSON); keeps its server-assigned etag.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate for the given config/database, scanning the Spring
// context for @Persistent entities to populate the mapping context.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
/**
 * Convenience wrapper: inserts {@code person} using its entity-derived partition key.
 */
private void insertPerson(Person person) {
final PartitionKey partitionKey =
new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
cosmosTemplate.insert(person, partitionKey);
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
// TEST_PERSON was already inserted in setUp, so this re-insert must conflict.
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
// Diagnostics should be captured even for a failed operation.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
// A null id with no auto-generation annotation should be rejected.
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
// GenIdEntity's id is auto-generated, so a null id must be filled in on insert.
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
public void testFindAll() {
// Only the single document seeded in setUp should be present.
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
// Query execution should record diagnostics and a positive request charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
// Existing id resolves to the seeded document.
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// A missing id yields null (not an exception).
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
// Add two more documents so all three ids are resolvable.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Order is not asserted, only membership and size.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
// Remove the seeded document first so the upsert takes the "insert" path.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized first name guards against matching stale data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
// Point operations record diagnostics but no query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Carry the current etag so the optimistic-concurrency precondition passes.
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
// The returned entity must carry the new etag, matching what a re-read sees.
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Carry the current etag so the optimistic-concurrency precondition passes.
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
public void testPatch() {
// Applies the shared single-operation patch (replace /age) and checks the
// returned entity reflects the new value.
final PartitionKey partitionKey = new PartitionKey(insertedPerson.getLastName());
final Person result =
cosmosTemplate.patch(insertedPerson.getId(), partitionKey, Person.class, operations);
assertEquals(result.getAge(), PATCH_AGE_1);
}
@Test
public void testPatchMultiOperations() {
// Applies the multi-operation patch (set/replace/add/remove/increment) and
// verifies every affected field on the returned entity.
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// One of the two shipping addresses was removed by the patch.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
public void testPatchPreConditionSuccess() {
// A filter predicate that matches the document lets the patch proceed.
// NOTE(review): "options" is a shared static field, so this mutation leaks to
// other tests using it — consider a per-test instance.
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
// FIX: the original carried a duplicated @Test annotation, which is a compile
// error in Java (@Test is not a repeatable annotation); exactly one is kept.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
// An upsert carrying a stale/wrong etag must fail the precondition and leave
// the stored document unmodified.
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
// Service message for a failed If-Match precondition.
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The stored document must be untouched by the failed upsert.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert unexpectedly succeeded.
fail();
}
@Test
public void testDeleteById() {
// Two documents present, then the seeded one is deleted by id + partition key.
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Point delete records diagnostics but no query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
// Insert a second document, then delete it via the entity-based overload.
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
// Point delete records diagnostics but no query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
// One seeded document, count by container name.
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Insert a second document and re-count.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive equality: exactly one of the two documents matches.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Case-insensitive equality with an upper-cased probe still matches the same document.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
// Two documents, page size PAGE_SIZE_1: first page full, second page holds the rest.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Continuation via page1.nextPageable() fetches the remaining document.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
// Three documents, page size PAGE_SIZE_2: two on the first page, one on the second.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
// Membership is asserted, not order.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Second page via continuation holds the remaining document.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive match on FIRST_NAME: only the seeded document qualifies,
// so the single page of size PAGE_SIZE_2 is also the last page.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
// Same query, case-insensitive with an upper-cased probe, must behave identically.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
// Three documents sharing NEW_LAST_NAME; seeded TEST_PERSON has a different
// lastName and is excluded by the criteria below.
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
// Ascending sort on firstName determines the expected order.
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Limiting to 1 keeps only the first element of the sorted result.
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
// Same fixture as testFindWithSortAndLimit; exercises OFFSET/LIMIT instead of LIMIT.
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Skip the first sorted element and take one: expect the middle element ("fred").
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
// Three documents; single page (PAGE_SIZE_3) sorted descending by firstName.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
// TEST_PERSON_2/3 share NEW_FIRST_NAME, which sorts after FIRST_NAME descending.
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Five documents with page size 3: verifies the ASC firstName sort holds across the page
// boundary — first page is "barney", FIRST_NAME, "fred" (so FIRST_NAME sorts between the
// two — presumably a lowercase value starting after 'b'; inferred from the assertions),
// and the second (last) page carries the two NEW_FIRST_NAME documents.
public void testFindAllWithTwoPagesAndVerifySortOrder() {
    final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    cosmosTemplate.insert(testPerson4,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    cosmosTemplate.insert(testPerson5,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
    final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
    final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
        containerName);
    assertThat(firstPage.getContent().size()).isEqualTo(3);
    PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
    final List<Person> firstPageResults = firstPage.getContent();
    assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
    assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
    assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
    // Continuation token from the first page drives the second request.
    final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
        containerName);
    assertThat(secondPage.getContent().size()).isEqualTo(2);
    PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
    final List<Person> secondPageResults = secondPage.getContent();
    assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
    assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
// exists() must find the seeded person both with an exact-case match and, via
// IgnoreCaseType.ALWAYS, with an upper-cased probe value.
public void testExists() {
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
    assertThat(exists).isTrue();
    // Same check, but the probe value is upper-cased and matching is case-insensitive.
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
    final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
    assertThat(existsIgnoreCase).isTrue();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// ARRAY_CONTAINS on the hobbies array must match exactly the seeded person.
public void testArrayContainsCriteria() {
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(hobbyCriteria);
    final List<Person> matches =
        TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// CONTAINING on firstName: a case-sensitive probe ("first") skips the document whose
// first name is the upper-case literal "NEW_FIRST_NAME"; a case-insensitive probe
// matches it as well.
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Extra document whose first name only matches when case is ignored.
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    final Criteria caseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    final List<Person> caseSensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
    assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    final Criteria caseInsensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    final List<Person> caseInsensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseInsensitive), Person.class, containerName));
    assertThat(caseInsensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
// CONTAINING on the id field: each single-digit probe ("1", "2", "3") matches exactly
// the one person whose id embeds that digit.
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
    Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON_2);
    Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
        containerName));
    assertThat(people3).containsExactly(TEST_PERSON_3);
}
@Test
// NOT_CONTAINING on firstName: a case-sensitive "li" probe excludes only TEST_PERSON
// (so its first name contains "li" — inferred from the assertion), while a
// case-insensitive "new" probe excludes everyone except TEST_PERSON.
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    Person TEST_PERSON_4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(TEST_PERSON_4, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, TEST_PERSON_4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
// NOT_CONTAINING on the id field: each single-digit probe excludes exactly the one
// person whose id embeds that digit and returns the other two.
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
    Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
    Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
        containerName));
    assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
// IS_NOT_NULL on lastName matches the single seeded person. The ignore-case flag is
// carried through the derived-query path even though the criterion has no value list.
public void testIsNotNullCriteriaCaseSensitive() {
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(lastNamePresent);
    final List<Person> matches =
        TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// STARTS_WITH with IgnoreCaseType.ALWAYS: an upper-cased prefix still matches the
// seeded person's first name.
public void testStartsWithCriteriaCaseSensitive() {
    final String upperCasedPrefix = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria prefixCriteria = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(upperCasedPrefix),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(prefixCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// IS_EQUAL with IgnoreCaseType.ALWAYS: an upper-cased probe value still matches.
// (The original local was misnamed "nameStartsWith" — copy-paste from the
// STARTS_WITH test; renamed here.)
public void testIsEqualCriteriaCaseSensitive() {
    final Criteria equalsCriteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(equalsCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// STRING_EQUALS with IgnoreCaseType.ALWAYS: an upper-cased probe value still matches.
// (Local renamed from the misleading "nameStartsWith".)
public void testStringEqualsCriteriaCaseSensitive() {
    final Criteria stringEqualsCriteria = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(stringEqualsCriteria), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// BETWEEN on age: a window of [AGE - 1, AGE + 1] around the seeded age matches the person.
public void testBetweenCriteria() {
    final Criteria ageWindow = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ageWindow), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies IS_EQUAL against a nested/indexed property path expressed in bracket syntax
// ("shippingAddresses[0]['postalCode']"), matching the seeded person's first address.
public void testFindWithEqualCriteriaContainingNestedProperty() {
    String postalCode = ADDRESSES.get(0).getPostalCode();
    String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
    Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
        Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies runQuery with a generated SqlQuerySpec whose property path contains spaces
// inside a bracketed map key ("passportIdsByCountry['United States of America']").
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    String subjectWithSpaces = "passportIdsByCountry['United States of America']";
    Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
        Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
    List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies runQuery with an explicitly generated SqlQuerySpec (BETWEEN on age) where the
// domain type and return type are the same simple entity class.
public void testRunQueryWithSimpleReturnType() {
    Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
        Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
    List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
    assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// sliceQuery with a page request: only one of the two stored documents matches the
// firstName criterion, so the slice holds a single element. Also checks that query
// statistics appear only after the slice query executes.
public void testSliceQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // After the insert: diagnostics present, statistics not yet populated.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
    final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
    assertThat(slice.getContent().size()).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same scenario as testSliceQuery but through runSliceQuery with a pre-generated
// SqlQuerySpec and an external PageRequest instead of a CosmosQuery-bound pageable.
public void testRunSliceQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
    final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
    assertThat(slice.getContent().size()).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Creates a container for an entity configured for autoscale throughput and verifies the
// provisioned autoscale max RU/s via a direct readThroughput() on the raw async client.
public void createWithAutoscale() throws ClassNotFoundException {
    final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
        new CosmosEntityInformation<>(AutoScaleSample.class);
    CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
    assertNotNull(containerProperties);
    ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
        .getContainer(autoScaleSampleInfo.getContainerName())
        .readThroughput()
        .block();
    assertNotNull(throughput);
    assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
        throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Creates a dedicated database with manual throughput configured through CosmosConfig
// and verifies the provisioned RU/s on the database resource. The database is deleted
// up front so the test starts from a known state.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    // Container creation triggers database creation with the configured throughput.
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against a null blocking result before dereferencing — consistent with the
    // assertNotNull(throughput) check in createWithAutoscale and gives a clearer failure
    // than a NullPointerException.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Builds a template with maxDegreeOfParallelism(20), runs a count query through it to
// exercise the code path, and asserts (via reflection) that the setting was applied.
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count result itself is not asserted; the call only forces a query execution.
    final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
// Same pattern as queryWithMaxDegreeOfParallelism, for the maxBufferedItemCount setting.
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Count result intentionally unused; the call exercises the query path.
    final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
// Same pattern as queryWithMaxDegreeOfParallelism, for responseContinuationTokenLimitInKb.
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Count result intentionally unused; the call exercises the query path.
    final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"), 2000);
}
@Test
// Same pattern as queryWithMaxDegreeOfParallelism, for the queryMetricsEnabled flag.
// NOTE(review): the method name misspells "Metrics" as "Mertics"; kept as-is because
// renaming would change the discovered test id (and any test filters referencing it).
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Count result intentionally unused; the call exercises the query path.
    final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
    assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
// Verifies that the client builder's user-agent suffix carries the Spring Data Cosmos
// marker and project version. Uses reflection because getUserAgentSuffix is not public.
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    getUserAgentSuffix.setAccessible(true);
    String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
    assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup helper: deletes the named database; a delete against a database
// that does not exist surfaces as a CosmosException with HTTP 404, which is tolerated.
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit's assertEquals takes (expected, actual); the original had the arguments
        // swapped, which yields a misleading failure message. Any status other than 404
        // still fails the test, as before.
        assertEquals(404, ex.getStatusCode());
    }
}
} |
Changed to single block() | public void testPatchMultiOperations() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextCount(1).verifyComplete();
assertEquals(patchedPerson.block().getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.block().getHobbies(),PATCH_HOBBIES);
assertEquals(patchedPerson.block().getFirstName(), PATCH_FIRST_NAME);
assertEquals(patchedPerson.block().getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
} | assertEquals(patchedPerson.block().getAge().intValue(), (AGE + PATCH_AGE_INCREMENT)); | public void testPatchMultiOperations() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(),PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
} | class ReactiveCosmosTemplateIT {
// Seed entities: TEST_PERSON is inserted fresh in setUp(); the others are inserted by
// individual tests as needed. All share AGE and PASSPORT_IDS_BY_COUNTRY.
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
    TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
    TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
    TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
    TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in optimistic-concurrency failure messages.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by testPatch / testPatchPreConditionFail.
private static final CosmosPatchOperations operations = CosmosPatchOperations
    .create()
    .replace("/age", PATCH_AGE_1);
// Multi-operation patch used by testPatchMultiOperations.
// NOTE(review): unlike the other fixtures this field is non-static, non-final and
// package-private — presumably an oversight; confirm before tightening.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
    .create()
    .replace("/firstName", PATCH_FIRST_NAME)
    .replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
    .add("/hobbies/2", PATCH_HOBBY1)
    .increment("/age", PATCH_AGE_INCREMENT);
// Shared request options; mutated by testPatchPreConditionFail (sets a filter predicate).
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
// Shared lazily in setUp() across all tests of this class.
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
// Credential whose key is swapped to the secondary key by some tests and restored in cleanup().
private static AzureKeyCredential azureKeyCredential;
// The TEST_PERSON instance as returned by the insert in setUp() (carries the server etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
// Lazily initializes the shared client/template once for the whole class, then, before
// every test, empties the containers and re-inserts TEST_PERSON, capturing the stored
// instance (with its server-assigned etag) in insertedPerson.
public void setUp() throws ClassNotFoundException {
    if (cosmosTemplate == null) {
        azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
        cosmosClientBuilder.credential(azureKeyCredential);
        client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
        personInfo = new CosmosEntityInformation<>(Person.class);
        containerName = personInfo.getContainerName();
        cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
    }
    collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
    insertedPerson = cosmosTemplate.insert(TEST_PERSON,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
// Builds a ReactiveCosmosTemplate bound to the shared async client and the given
// database, pre-populating the mapping context with all @Persistent entities found
// by scanning the application context.
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext mappingContext = new CosmosMappingContext();
    mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(mappingContext, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new ReactiveCosmosTemplate(factory, config, converter);
}
@After
// Restores the primary key after tests that switch the credential to the secondary key.
public void cleanup() {
    azureKeyCredential.update(cosmosDbKey);
}
@Test
// Inserting the already-seeded TEST_PERSON again must fail with a CosmosAccessException
// wrapping a ConflictException (HTTP 409 id conflict).
public void testInsertDuplicateId() {
    final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
    StepVerifier.create(insertMono)
        .expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
            ((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
        .verify();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findById for the seeded person returns the full document and populates diagnostics
// and response statistics with a positive request charge.
public void testFindByID() {
    final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
        TEST_PERSON.getId(),
        Person.class);
    StepVerifier.create(findById)
        .consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
        .verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Swaps the credential to the secondary account key and verifies findById still works.
// cleanup() restores the primary key afterwards.
public void testFindByIDBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
        TEST_PERSON.getId(),
        Person.class);
    StepVerifier.create(findById).consumeNextWith(actual -> {
        Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
        Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
    }).verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findAll over the Person container emits exactly the single document seeded in setUp(),
// and the query populates diagnostics plus a positive request charge.
public void testFindAll() {
    final Flux<Person> allPeople = cosmosTemplate.findAll(Person.class.getSimpleName(),
        Person.class);
    StepVerifier.create(allPeople).expectNextCount(1).verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same as testFindByID but inlines the Mono into the StepVerifier; exercises the
// findById overload that takes an explicit container name.
public void testFindByIdWithContainerName() {
    StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
        TEST_PERSON.getId(), Person.class))
        .consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
        .verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Plain insert of a new person: the inserted entity is echoed back; point writes record
// diagnostics but no query response statistics.
public void testInsert() {
    StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
        .expectNext(TEST_PERSON_3).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Same as testInsert but with the credential switched to the secondary account key.
public void testInsertBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
        .expectNext(TEST_PERSON_3).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Insert through the overload that takes an explicit container name.
public void testInsertWithContainerName() {
    StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
        .expectNext(TEST_PERSON_2).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// A Person with a null id (Person's id is not auto-generated) must fail the insert with
// a CosmosAccessException rather than silently assigning an id.
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
        person, new PartitionKey(person.getLastName()));
    StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// A GenIdEntity inserted with a null id gets an id assigned on insert (GenIdEntity's id
// is auto-generated, per the test name).
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved = cosmosTemplate
        .insert(GenIdEntity.class.getSimpleName(), toInsert, null)
        .block();
    assertThat(saved).isNotNull();
    assertThat(saved.getId()).isNotNull();
}
@Test
// Upserts TEST_PERSON_2 carrying the etag of the freshly inserted document and an
// extended hobbies list; the upsert completes and records diagnostics only (point write).
// NOTE(review): this mutates the shared static TEST_PERSON_2 (etag + hobbies) —
// presumably tolerated because containers are reset per test; confirm.
public void testUpsert() {
    final Person p = TEST_PERSON_2;
    p.set_etag(insertedPerson.get_etag());
    final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
    hobbies.add("more code");
    p.setHobbies(hobbies);
    final Mono<Person> upsert = cosmosTemplate.upsert(p);
    StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Applies the replace("/age") patch to the seeded person and verifies the persisted age.
public void testPatch() {
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, operations, null);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    // Read the patched document exactly once. The original subscribed to the findById
    // Mono twice (StepVerifier + a separate block()), re-issuing the read; a single
    // block() gives the same assertion with one round trip.
    final Person patchedPerson =
        cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
    assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
// The original code carried a duplicated @Test annotation here, which does not compile
// (JUnit's @Test is not repeatable); exactly one is kept.
// The filter predicate matches no document (no seeded person has lastName 'dummy'), so
// the patch must fail with a wrapped PreconditionFailedException.
// NOTE(review): this mutates the shared static `options` fixture — confirm no other
// test depends on an unset filter predicate.
public void testPatchPreConditionFail() {
    options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
    Mono<Person> person = cosmosTemplate.patch(insertedPerson, operations, options);
    StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
        ((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
@Test
// Optimistic concurrency: upserting with a stale/incorrect _etag must fail
// with a precondition error, and the stored document must stay unchanged.
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Deliberately poison the etag so the If-Match precondition cannot hold.
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
// The wrapped service exception must be a CosmosException whose message
// reports the failed precondition.
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
// Re-read the document: the rejected upsert must not have modified it.
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
// Expected path: exception was thrown and verified above.
return;
}
// Reaching here means the upsert succeeded despite the wrong etag — fail.
fail();
}
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testUpsertWithContainerName() {
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(Person.class.getSimpleName(), p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// End-to-end delete-by-id: insert a second person, confirm two documents,
// delete one by id + partition key, confirm one remains. Also checks the
// diagnostics side-channel after each operation: point writes/deletes record
// diagnostics but no query statistics; queries record both.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
// Point write: diagnostics present, query statistics absent.
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
// Query: both diagnostics and statistics (with a positive RU charge) present.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
// Point delete: diagnostics present, query statistics absent again.
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByIdBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testFind() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// The container holds a single seeded document, so count() must emit 1; the
// count query must also record diagnostics and a positive request charge.
public void testCount() {
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Rotating the shared credential to the secondary account key must not break
// counting; the single seeded document is still visible.
public void testCountBySecondaryKey() {
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
}
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
final Flux<Person> flux = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = cosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
final Flux<Person> people = cosmosTemplate.find(new CosmosQuery(criteria), Person.class, containerName);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
String ivoryCoastPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
String subjectWithSpecialChars = "passportIdsByCountry[\"Côte d'Ivoire\"]";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpecialChars,
Collections.singletonList(ivoryCoastPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Flux<Person> people = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Drops the given database if it exists. A 404 from the service simply means
 * the database was already absent and is tolerated; any other failure status
 * fails the test.
 */
private void deleteDatabaseIfExists(String dbName) {
    final CosmosAsyncDatabase target = client.getDatabase(dbName);
    try {
        target.delete().block();
    } catch (CosmosException deletionFailure) {
        assertEquals(deletionFailure.getStatusCode(), 404);
    }
}
}
class ReactiveCosmosTemplateIT {
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Shared patch that replaces "/age" with PATCH_AGE_1; reused by the patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Exercises every patch operation kind (set/replace/add/remove/increment).
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): static but mutable — tests call setFilterPredicate() on this
// shared instance, so a predicate set by one test leaks into later tests
// unless each test overwrites it; consider a fresh options object per test.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
private static AzureKeyCredential azureKeyCredential;
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
@Before
// Per-test fixture: lazily bootstraps the shared client/template once (static
// fields survive across tests), then resets the containers and seeds exactly
// one Person document whose inserted state (etag etc.) tests read back.
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
// One-time initialization: build the async client with the primary key
// credential and derive container metadata from the Person entity.
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Ensure all test containers exist and are empty before every test.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
/**
 * Builds a ReactiveCosmosTemplate over the shared async client for the given
 * database, with a mapping context populated by scanning the application
 * context for persistent entity types.
 */
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext entityMappingContext = new CosmosMappingContext();
    entityMappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(entityMappingContext, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new ReactiveCosmosTemplate(factory, config, converter);
}
// Runs after every test: restore the primary account key, since several tests
// (the *BySecondaryKey variants and testInvalidSecondaryKey) rotate the shared
// AzureKeyCredential and would otherwise poison subsequent tests.
@After
public void cleanup() {
azureKeyCredential.update(cosmosDbKey);
}
@Test
public void testInsertDuplicateId() {
final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
StepVerifier.create(insertMono)
.expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
.verify();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
person, new PartitionKey(person.getLastName()));
StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final Mono<GenIdEntity> insertedEntityMono = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
GenIdEntity insertedEntity = insertedEntityMono.block();
assertThat(insertedEntity).isNotNull();
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
public void testUpsert() {
final Person p = TEST_PERSON_2;
p.set_etag(insertedPerson.get_etag());
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// Applies the shared "/age" replace patch by id + partition key, then reads
// the document back and checks the age took effect.
public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
}
@Test
// A patch filter predicate that matches the target document (same lastName)
// must let the patch through; the emitted person carries the patched age.
// NOTE: the original carried a duplicated @Test annotation, which does not
// compile (@Test is not @Repeatable); the duplicate has been removed.
public void testPatchPreConditionSuccess() {
    options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
    Mono<Person> patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
    StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
}
@Test
// A patch filter predicate that matches no document must make the service
// reject the operation with 412, surfaced as a CosmosAccessException wrapping
// PreconditionFailedException.
public void testPatchPreConditionFail() {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Mono<Person> person = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
fail();
}
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testUpsertWithContainerName() {
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(Person.class.getSimpleName(), p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same flow as the delete-by-id test, but removes the document via
// deleteEntity(entity) using the instance returned by insert (so the etag
// captured on insert is the one sent with the delete).
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
// Point write: diagnostics present, feed statistics absent.
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// One document (the one seeded in setup) should remain.
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Delete-by-id must also succeed when the client authenticates with the
// account's secondary key (the credential is restored by the @After hook).
@Test
public void testDeleteByIdBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
// find() with an equality criteria on firstName must match both with exact
// case (IgnoreCaseType.NEVER) and case-insensitively (ALWAYS, querying with
// the upper-cased name).
@Test
public void testFind() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
// Upper-cased value only matches because IgnoreCaseType.ALWAYS is used.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
// Query operations populate response statistics with a positive RU charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// exists() must report true for an equality criteria on firstName, both with
// exact case and case-insensitively (querying with the upper-cased name).
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
// exists() is query-backed, so statistics and an RU charge are recorded.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// count() should see exactly the one document seeded in setup, and the
// query should record diagnostics plus a positive request charge.
@Test
public void testCount() {
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Counting documents must also work when the client authenticates with the
 * account's secondary key; the credential is restored by the cleanup hook.
 */
@Test
public void testCountBySecondaryKey() {
    // Swap the shared credential over to the secondary key first.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    final Mono<Long> documentCount = cosmosTemplate.count(containerName);
    StepVerifier.create(documentCount)
        .expectNext(1L)
        .verifyComplete();
}
// A bogus key must surface as a CosmosAccessException from the reactive
// pipeline (authentication failure), while diagnostics are still captured.
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
// Even failed calls leave diagnostics behind for troubleshooting.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
/**
 * runQuery with a generated BETWEEN SqlQuerySpec should return the single
 * document whose age falls inside the [AGE - 1, AGE + 1] range.
 */
@Test
public void testRunQueryWithSimpleReturnType() {
    final Criteria ageRangeCriteria = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery cosmosQuery = new CosmosQuery(ageRangeCriteria);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(cosmosQuery);
    final Flux<Person> results = cosmosTemplate.runQuery(querySpec, Person.class, Person.class);
    StepVerifier.create(results).expectNextCount(1).verifyComplete();
}
// Regression test: runQuery must deserialize entities whose fields include
// LocalDateTime (AuditableEntity's audit timestamps) without conversion errors.
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
// Saving through the repository populates the auditing fields.
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = cosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
// Criteria subjects may address nested properties with bracket syntax
// (array index + quoted key); equality on the first shipping address's
// postal code should match the seeded person.
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
final Flux<Person> people = cosmosTemplate.find(new CosmosQuery(criteria), Person.class, containerName);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
// Map keys containing non-ASCII characters and apostrophes ("Côte d'Ivoire")
// must survive query-spec generation; the generated SQL must not break on the
// quote inside the bracketed key.
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
String ivoryCoastPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
String subjectWithSpecialChars = "passportIdsByCountry[\"Côte d'Ivoire\"]";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpecialChars,
Collections.singletonList(ivoryCoastPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Flux<Person> people = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
// A container created for an autoscale-annotated entity should report the
// configured autoscale max throughput when read back through the raw client.
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
// Read the provisioned throughput directly via the SDK client, bypassing the template.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
/**
 * When the config enables database-level throughput (manual, 700 RU/s), the
 * database created by the template must report that manual throughput when
 * read back through the raw SDK client.
 *
 * Uses a dedicated database name so it does not interfere with the shared one.
 */
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
    // Start clean: the database may linger from a previous run.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    // Creating the container triggers creation of the database with the configured throughput.
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against an NPE hiding the real failure if readThroughput() emitted nothing
    // (same pattern as createWithAutoscale).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
// A template built with maxDegreeOfParallelism(20) must still answer queries
// correctly and must carry the setting through to its internal field
// (verified via reflection, since there is no public getter).
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
// A template built with maxBufferedItemCount(500) must still answer queries
// correctly and must carry the setting through to its internal field
// (verified via reflection, since there is no public getter).
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
// A template built with responseContinuationTokenLimitInKb(2000) must still
// answer queries correctly and must carry the setting through to its internal
// field (verified via reflection, since there is no public getter).
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
// A template built with enableQueryMetrics(true) must still answer queries
// correctly and carry the flag through to its internal field (verified via
// reflection). NOTE(review): method name has a typo ("Mertics" -> "Metrics");
// left as-is here since the name is the test's identifier.
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
// The client builder's user-agent suffix must identify spring-data-cosmos and
// its version. The suffix has no public getter, so the private accessor is
// invoked via reflection.
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Best-effort teardown helper: drops the given database, tolerating only the
 * "not found" case when it does not exist yet.
 *
 * @param dbName name of the database to remove
 */
private void deleteDatabaseIfExists(String dbName) {
    final CosmosAsyncDatabase target = client.getDatabase(dbName);
    try {
        target.delete().block();
    } catch (CosmosException deleteFailure) {
        // A missing database surfaces as HTTP 404; anything else is a real failure.
        assertEquals(deleteFailure.getStatusCode(), 404);
    }
}
} |
added | public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, operations, null);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextCount(1).verifyComplete();
assertEquals(patchedPerson.block().getAge(), PATCH_AGE_1);
} | assertEquals(patchedPerson.block().getAge(), PATCH_AGE_1); | public void testPatch() {
final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
StepVerifier.create(patch).expectNextCount(1).verifyComplete();
Mono<Person> patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
} | class ReactiveCosmosTemplateIT {
// Fixture documents. TEST_PERSON is inserted fresh before every test (see setUp);
// the others are inserted by individual tests as needed.
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in the service's precondition-failure message (etag mismatch).
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the basic patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch: replace, add into an array, and increment in one call.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT);
// NOTE(review): shared mutable options object — mutating it inside a test
// (e.g. setting a filter predicate) leaks state into later tests.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
// Shared lazily-initialized infrastructure (built once in setUp, reused across tests).
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
private static AzureKeyCredential azureKeyCredential;
// The TEST_PERSON instance as returned by insert (carries the server-assigned etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
// Lazily builds the shared client/template on the first test, then (for every
// test) re-creates empty containers and seeds TEST_PERSON, capturing the
// inserted instance (with its server-assigned etag) in insertedPerson.
@Before
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
// Builds a ReactiveCosmosTemplate against the shared async client for the given
// database, scanning the application context for @Persistent entity types.
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new ReactiveCosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Restore the primary key after tests that switch the shared credential to the
// secondary (or an invalid) key.
@After
public void cleanup() {
azureKeyCredential.update(cosmosDbKey);
}
// Re-inserting the document seeded in setUp must fail with a
// CosmosAccessException wrapping the service's ConflictException (HTTP 409).
@Test
public void testInsertDuplicateId() {
final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
StepVerifier.create(insertMono)
.expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
.verify();
// Diagnostics are captured even for failed operations.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// findById must return the seeded person and record diagnostics plus
// response statistics with a positive request charge.
@Test
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// findById must also work with the account's secondary key (credential is
// restored by cleanup()); only name fields are compared here.
@Test
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// findAll should emit exactly the one document seeded in setUp, and as a
// feed operation it should populate statistics with a positive RU charge.
@Test
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same as testFindByID, but exercises the findById overload that takes an
// explicit container name.
@Test
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// insert should emit the inserted entity; as a point write it records
// diagnostics but leaves feed statistics null.
@Test
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// insert must also succeed when authenticating with the secondary key
// (credential restored by cleanup()).
@Test
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Exercises the insert overload that takes an explicit container name.
@Test
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Inserting a Person with a null id must fail (Person's id is not
// auto-generated), surfacing as a CosmosAccessException.
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
person, new PartitionKey(person.getLastName()));
StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
/**
 * Inserting a GenIdEntity with a null id must succeed, with an id generated
 * for it (GenIdEntity's id column is annotated for auto-generation).
 */
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    final GenIdEntity entity = new GenIdEntity(null, "foo");
    final Mono<GenIdEntity> insertedEntityMono = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
        entity, null);
    // Verify reactively via StepVerifier, consistent with the sibling tests,
    // instead of block() + assertions on a possibly-null result.
    StepVerifier.create(insertedEntityMono)
        .expectNextMatches(insertedEntity -> insertedEntity.getId() != null)
        .verifyComplete();
}
// Upserting a modified person (with the etag captured at insert) must
// succeed; a point write records diagnostics without feed statistics.
@Test
public void testUpsert() {
final Person p = TEST_PERSON_2;
// Carry over the etag from the inserted document so the upsert's
// concurrency check passes.
p.set_etag(insertedPerson.get_etag());
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
/**
 * A single patch call carrying multiple operations (replace firstName and the
 * passport map, add a hobby, increment age) must apply all of them; the
 * document is re-read once and every patched field is verified.
 */
@Test
public void testPatchMultiOperations() {
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    final Mono<Person> patchedPersonMono = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
    StepVerifier.create(patchedPersonMono).expectNextCount(1).verifyComplete();
    // Fetch once: each block() on the Mono would re-execute the findById round-trip.
    final Person patchedPerson = patchedPersonMono.block();
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
/**
 * A patch whose filter predicate matches no document must fail with a
 * CosmosAccessException wrapping PreconditionFailedException.
 */
@Test
public void testPatchPreConditionFail() {
    // Use a method-local options object: mutating the shared static 'options'
    // field would leak the filter predicate into every later patch test.
    final CosmosPatchItemRequestOptions failingOptions = new CosmosPatchItemRequestOptions();
    failingOptions.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
    final Mono<Person> person = cosmosTemplate.patch(insertedPerson, operations, failingOptions);
    StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException
        && ((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
// Upserting with a stale/wrong etag must be rejected by the service's
// optimistic concurrency check (precondition failure), leaving the stored
// document unmodified.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
// The rejected upsert must not have changed the stored document.
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
// Reaching here means the upsert succeeded despite the wrong etag.
fail();
}
// upsert must also succeed when authenticating with the secondary key
// (credential restored by cleanup()).
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Exercises the upsert overload that takes an explicit container name.
@Test
public void testUpsertWithContainerName() {
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(Person.class.getSimpleName(), p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Deletes a just-inserted document by id + partition key, checking document
// counts before (2) and after (1), and the diagnostics pattern: point
// operations leave feed statistics null, feed operations populate them with
// a positive request charge.
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Only the person seeded in setUp should remain after the delete.
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same flow as testDeleteById, but removes the document via deleteEntity()
// using the instance returned by insert (which carries the server etag).
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// Only the person seeded in setUp should remain after the delete.
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Delete-by-id must also succeed when authenticating with the secondary key
// (credential restored by cleanup()).
@Test
public void testDeleteByIdBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
// find() with an equality criteria on firstName must match both with exact
// case (NEVER) and case-insensitively (ALWAYS, upper-cased query value).
@Test
public void testFind() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
// Upper-cased value only matches because IgnoreCaseType.ALWAYS is used.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
// Query operations populate statistics with a positive RU charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies exists() for both case-sensitive and case-insensitive equality criteria;
// diagnostics assertions at the end depend on the preceding template calls.
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
// Same check with the needle upper-cased and case-insensitive matching enabled.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Counts the single fixture document inserted by setUp and checks that the
// count query populated the shared diagnostics holder with a positive RU charge.
@Test
public void testCount() {
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountBySecondaryKey() {
    // Counting must also succeed when authenticated with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
}
// A bogus key must surface as CosmosAccessException while still recording diagnostics.
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
// Even failed requests should leave diagnostics behind.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testRunQueryWithSimpleReturnType() {
    // Build a BETWEEN query spanning the fixture person's age and run it as raw SQL.
    final Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec =
        new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
        .expectNextCount(1)
        .verifyComplete();
}
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
    // Persist an auditable entity, then query it back by id to prove that its
    // LocalDateTime audit fields deserialize correctly through runQuery.
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria byId = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
        Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec =
        new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byId));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
        .expectNextCount(1)
        .verifyComplete();
}
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    // Match on a nested property path (postal code of the first shipping address).
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final Criteria nested = Criteria.getInstance(CriteriaType.IS_EQUAL,
        "shippingAddresses[0]['postalCode']",
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    StepVerifier.create(cosmosTemplate.find(new CosmosQuery(nested), Person.class, containerName))
        .expectNextCount(1)
        .verifyComplete();
}
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
    // The map key holds an accent and an apostrophe; the generated SQL must quote it.
    final String passportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
    final Criteria byPassport = Criteria.getInstance(CriteriaType.IS_EQUAL,
        "passportIdsByCountry[\"Côte d'Ivoire\"]",
        Collections.singletonList(passportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec =
        new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byPassport));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
        .expectNextCount(1)
        .verifyComplete();
}
// Creates a container for an entity configured with autoscale throughput and then
// reads the throughput back directly through the SDK client to verify the max RU/s.
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
// Bypass the template and read the provisioned throughput via the raw async client.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
// Creates a database with manually-provisioned throughput and verifies the RU/s
// value by reading it back through the raw async client.
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
    deleteDatabaseIfExists(configuredThroughputDbName);
    // false => manual (fixed) throughput rather than autoscale.
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against block() returning null (empty Mono) before dereferencing;
    // mirrors the assertNotNull pattern used in createWithAutoscale.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    // Build a template whose config requests 20-way parallel query execution.
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final ReactiveCosmosTemplate template = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
    // Persist an entity and query it back through the configured template.
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria byId = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
        Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byId));
    StepVerifier.create(template.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
        .expectNextCount(1)
        .verifyComplete();
    // The builder setting must have been propagated onto the template instance.
    assertEquals((int) ReflectionTestUtils.getField(template, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    // Build a template whose config buffers at most 500 items during queries.
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final ReactiveCosmosTemplate template = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
    // Persist an entity and query it back through the configured template.
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria byId = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
        Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byId));
    StepVerifier.create(template.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
        .expectNextCount(1)
        .verifyComplete();
    // The builder setting must have been propagated onto the template instance.
    assertEquals((int) ReflectionTestUtils.getField(template, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    // Build a template whose config caps the continuation token size at 2000 KB.
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final ReactiveCosmosTemplate template =
        createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
    // Persist an entity and query it back through the configured template.
    final AuditableEntity saved = new AuditableEntity();
    saved.setId(UUID.randomUUID().toString());
    auditableRepository.save(saved);
    final Criteria byId = Criteria.getInstance(CriteriaType.IS_EQUAL, "id",
        Collections.singletonList(saved.getId()), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byId));
    StepVerifier.create(template.runQuery(querySpec, AuditableEntity.class, AuditableEntity.class))
        .expectNextCount(1)
        .verifyComplete();
    // The builder setting must have been propagated onto the template instance.
    assertEquals((int) ReflectionTestUtils.getField(template,
        "responseContinuationTokenLimitInKb"), 2000);
}
// Verifies that enableQueryMetrics(true) is propagated onto the template.
// NOTE(review): "Mertics" in the method name is a typo for "Metrics"; left as-is
// to avoid changing the block's public name.
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
// Confirm the private flag on the template via reflection.
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
// Reflectively reads the client builder's private user-agent suffix and checks that
// the spring-data marker and the project version have been appended to it.
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
// getUserAgentSuffix is not public API, hence the reflective access.
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Drops {@code dbName} so tests start from a clean slate; a 404 from the
 * service simply means the database was already absent and is tolerated.
 */
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit's assertEquals takes (expected, actual); the original had the
        // arguments swapped, which produces a misleading failure message.
        assertEquals(404, ex.getStatusCode());
    }
}
} | class ReactiveCosmosTemplateIT {
// Fixture people: TEST_PERSON is (re)inserted before each test by setUp(); the
// others are inserted by individual tests as needed.
private static final Person TEST_PERSON = new Person(TestConstants.ID_1, TestConstants.FIRST_NAME,
TestConstants.LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(TestConstants.ID_2, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(TestConstants.ID_3, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_4 = new Person(TestConstants.ID_4, TestConstants.NEW_FIRST_NAME,
TestConstants.NEW_LAST_NAME, TestConstants.HOBBIES, TestConstants.ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring of the server's optimistic-concurrency failure message (see etag test).
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the precondition tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one call.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared and mutated by the precondition tests (filter predicate is reassigned).
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
// Ensures containers are created before the class and cleaned up afterwards.
@ClassRule
public static final ReactiveIntegrationTestCollectionManager collectionManager = new ReactiveIntegrationTestCollectionManager();
@Value("${cosmos.secondaryKey}")
private String cosmosDbSecondaryKey;
@Value("${cosmos.key}")
private String cosmosDbKey;
// Lazily initialized once by setUp() and shared across all tests in the class.
private static CosmosAsyncClient client;
private static ReactiveCosmosTemplate cosmosTemplate;
private static String containerName;
private static CosmosEntityInformation<Person, String> personInfo;
private static AzureKeyCredential azureKeyCredential;
// The TEST_PERSON instance as returned by the per-test insert in setUp().
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
@Autowired
private AuditableRepository auditableRepository;
// Per-test setup: lazily builds the shared client/template once for the class,
// then empties the containers and inserts the TEST_PERSON fixture.
@Before
public void setUp() throws ClassNotFoundException {
// One-time static initialization; order matters (credential before client).
if (cosmosTemplate == null) {
azureKeyCredential = new AzureKeyCredential(cosmosDbKey);
cosmosClientBuilder.credential(azureKeyCredential);
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createReactiveCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class, GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON))).block();
}
/**
 * Builds a {@link ReactiveCosmosTemplate} for {@code dbName} with the given
 * config, scanning the application context for persistent entity types.
 */
private ReactiveCosmosTemplate createReactiveCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    return new ReactiveCosmosTemplate(factory, config, new MappingCosmosConverter(context, null));
}
// Restore the primary key after tests that swapped in the secondary/invalid key,
// since the credential object is shared statically across tests.
@After
public void cleanup() {
azureKeyCredential.update(cosmosDbKey);
}
// Inserting the same id twice must fail with a Conflict wrapped in CosmosAccessException.
@Test
public void testInsertDuplicateId() {
final Mono<Person> insertMono = cosmosTemplate.insert(TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
StepVerifier.create(insertMono)
.expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof ConflictException)
.verify();
// Failed operations should still capture diagnostics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Point-read by id returns the fixture person and records diagnostics with a charge.
@Test
public void testFindByID() {
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Point-read must also work when authenticated with the secondary account key.
@Test
public void testFindByIDBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById).consumeNextWith(actual -> {
Assert.assertEquals(actual.getFirstName(), TEST_PERSON.getFirstName());
Assert.assertEquals(actual.getLastName(), TEST_PERSON.getLastName());
}).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// findAll returns only the single fixture document inserted by setUp.
@Test
public void testFindAll() {
final Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same point-read as testFindByID but via the (containerName, id, type) overload.
@Test
public void testFindByIdWithContainerName() {
StepVerifier.create(cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class))
.consumeNextWith(actual -> Assert.assertEquals(actual, TEST_PERSON))
.verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Plain insert emits the inserted entity; point writes record diagnostics but
// no query response statistics (hence the isNull assertion).
@Test
public void testInsert() {
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Insert must also succeed when authenticated with the secondary account key.
@Test
public void testInsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
StepVerifier.create(cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3))))
.expectNext(TEST_PERSON_3).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Insert via the explicit-container-name overload.
@Test
public void testInsertWithContainerName() {
StepVerifier.create(cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2))))
.expectNext(TEST_PERSON_2).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// A null id on an entity without auto-generation must be rejected by the template.
@Test
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
Mono<Person> entityMono = cosmosTemplate.insert(Person.class.getSimpleName(),
person, new PartitionKey(person.getLastName()));
StepVerifier.create(entityMono).verifyError(CosmosAccessException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // Insert an entity with a null id; the auto-generate annotation on the id
    // column should cause the template to assign one.
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved = cosmosTemplate
        .insert(GenIdEntity.class.getSimpleName(), toInsert, null)
        .block();
    assertThat(saved).isNotNull();
    assertThat(saved.getId()).isNotNull();
}
// Upsert of an existing document with a matching etag succeeds; shares the etag
// from the setUp insert so the server's precondition check passes.
@Test
public void testUpsert() {
final Person p = TEST_PERSON_2;
p.set_etag(insertedPerson.get_etag());
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
// Point writes produce diagnostics but no query response statistics.
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Applies a batch of patch operations (set/replace/add/remove/increment) in a
// single call, then re-reads the document and verifies each operation took effect.
// NOTE(review): the original carried a duplicated @Test annotation, which does not
// compile (org.junit.Test is not @Repeatable); exactly one is kept here.
@Test
public void testPatchMultiOperations() {
    final Mono<Person> patch = cosmosTemplate.patch(insertedPerson.getId(),
        new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
    StepVerifier.create(patch).expectNextCount(1).verifyComplete();
    final Person patchedPerson =
        cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class).block();
    // Guard against block() returning null (empty Mono) before dereferencing.
    Assert.assertNotNull(patchedPerson);
    assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
    assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(patchedPerson.getShippingAddresses().size(), 1);
    assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
// Patch succeeds when the server-side filter predicate matches the document.
// NOTE: mutates the shared static `options` instance used by the next test too.
@Test
public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Mono<Person> patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
StepVerifier.create(patchedPerson).expectNextMatches(person -> person.getAge() == PATCH_AGE_1).verifyComplete();
}
// Patch fails with PreconditionFailed when the filter predicate matches nothing.
@Test
public void testPatchPreConditionFail() {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Mono<Person> person = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
StepVerifier.create(person).expectErrorMatches(ex -> ex instanceof CosmosAccessException &&
((CosmosAccessException) ex).getCosmosException() instanceof PreconditionFailedException).verify();
}
// Upserting with a stale etag must trip the server's optimistic-concurrency check
// and leave the stored document untouched. The early return inside the catch plus
// the trailing fail() enforce that the exception path is actually taken.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), TestConstants.UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(updated).block();
} catch (CosmosAccessException cosmosAccessException) {
assertThat(cosmosAccessException.getCosmosException()).isNotNull();
final Throwable cosmosClientException = cosmosAccessException.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
// The server reports the etag mismatch as a failed precondition.
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
final Mono<Person> unmodifiedPerson =
cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
// The stored document must still carry the original first name.
StepVerifier.create(unmodifiedPerson).expectNextMatches(person ->
person.getFirstName().equals(insertedPerson.getFirstName())).verifyComplete();
return;
}
fail();
}
// Upsert must also succeed when authenticated with the secondary account key.
@Test
public void testUpsertBySecondaryKey() {
azureKeyCredential.update(cosmosDbSecondaryKey);
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Upsert via the explicit-container-name overload.
@Test
public void testUpsertWithContainerName() {
final Person p = TEST_PERSON_2;
final ArrayList<String> hobbies = new ArrayList<>(p.getHobbies());
hobbies.add("more code");
p.setHobbies(hobbies);
final Mono<Person> upsert = cosmosTemplate.upsert(Person.class.getSimpleName(), p);
StepVerifier.create(upsert).expectNextCount(1).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// Insert a second person, delete it by id, and verify counts before and after.
// The interleaved diagnostics assertions distinguish point operations (no query
// statistics) from queries (statistics with a positive RU charge), so the exact
// ordering of template calls and assertions is significant.
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteById(Person.class.getSimpleName(),
TEST_PERSON_4.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Same shape as testDeleteById but deletes via deleteEntity(entity) instead of
// deleteById; diagnostics assertions again track point ops vs. queries in order.
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Flux<Person> flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(2).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Mono<Void> voidMono = cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
StepVerifier.create(voidMono).verifyComplete();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
flux = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testDeleteByIdBySecondaryKey() {
    // Authenticate with the account's secondary key before exercising delete-by-id.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    cosmosTemplate.insert(TEST_PERSON_4,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4))).block();

    // Container now holds the setUp fixture person plus TEST_PERSON_4.
    Flux<Person> people = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(people).expectNextCount(2).verifyComplete();

    // Remove TEST_PERSON_4 by id + partition key and confirm one document remains.
    final Mono<Void> deletion = cosmosTemplate.deleteById(Person.class.getSimpleName(),
        TEST_PERSON_4.getId(),
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_4)));
    StepVerifier.create(deletion).verifyComplete();

    people = cosmosTemplate.findAll(Person.class.getSimpleName(), Person.class);
    StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
// Verifies find() with an equality criteria, case-sensitive and case-insensitive.
// NOTE: the trailing diagnostics assertions read shared state populated by the most
// recent template call, so statement order here is significant.
@Test
public void testFind() {
// Case-sensitive match against the exact stored first name.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Flux<Person> personFlux = cosmosTemplate.find(query, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFlux).expectNextCount(1).verifyComplete();
// Upper-cased needle must still match when IgnoreCaseType.ALWAYS is used.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Flux<Person> personFluxIgnoreCase = cosmosTemplate.find(queryIgnoreCase, Person.class,
Person.class.getSimpleName());
StepVerifier.create(personFluxIgnoreCase).expectNextCount(1).verifyComplete();
// Query diagnostics should be captured and carry a positive request-unit charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies exists() for both case-sensitive and case-insensitive equality criteria;
// diagnostics assertions at the end depend on the preceding template calls.
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Mono<Boolean> exists = cosmosTemplate.exists(query, Person.class, containerName);
StepVerifier.create(exists).expectNext(true).verifyComplete();
// Same check with the needle upper-cased and case-insensitive matching enabled.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Mono<Boolean> existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
StepVerifier.create(existsIgnoreCase).expectNext(true).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Counts the single fixture document inserted by setUp and checks that the
// count query populated the shared diagnostics holder with a positive RU charge.
@Test
public void testCount() {
final Mono<Long> count = cosmosTemplate.count(containerName);
StepVerifier.create(count).expectNext((long) 1).verifyComplete();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
Assertions.assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountBySecondaryKey() {
    // Counting must also succeed when authenticated with the secondary account key.
    azureKeyCredential.update(cosmosDbSecondaryKey);
    StepVerifier.create(cosmosTemplate.count(containerName))
        .expectNext(1L)
        .verifyComplete();
}
// A bogus key must surface as CosmosAccessException while still recording diagnostics.
@Test
public void testInvalidSecondaryKey() {
azureKeyCredential.update("Invalid secondary key");
final Mono<Person> findById = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(),
Person.class);
StepVerifier.create(findById)
.expectError(CosmosAccessException.class)
.verify();
// Even failed requests should leave diagnostics behind.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
/**
 * Builds a BETWEEN criteria over "age" bracketing the seeded person's age,
 * generates the SQL query spec and runs it, expecting exactly one result.
 */
@Test
public void testRunQueryWithSimpleReturnType() {
    final Criteria betweenCriteria = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec querySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(betweenCriteria));
    StepVerifier.create(cosmosTemplate.runQuery(querySpec, Person.class, Person.class))
        .expectNextCount(1)
        .verifyComplete();
}
/**
 * Saves an auditable entity (whose audit fields are LocalDateTime-typed) and
 * verifies runQuery can deserialize the return type containing LocalDateTime.
 */
@Test
public void testRunQueryWithReturnTypeContainingLocalDateTime() {
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = cosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
}
/**
 * Queries with a criteria subject addressing a nested property via bracket
 * notation ({@code shippingAddresses[0]['postalCode']}) and expects one match.
 */
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
final Flux<Person> people = cosmosTemplate.find(new CosmosQuery(criteria), Person.class, containerName);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
/**
 * Queries a map entry whose key contains non-ASCII and quote characters
 * ({@code "Côte d'Ivoire"}) to verify special characters survive query
 * generation; expects exactly one match.
 */
@Test
public void testRunQueryWithEqualCriteriaContainingSpecialChars() {
String ivoryCoastPassportId = PASSPORT_IDS_BY_COUNTRY.get("Côte d'Ivoire");
String subjectWithSpecialChars = "passportIdsByCountry[\"Côte d'Ivoire\"]";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpecialChars,
Collections.singletonList(ivoryCoastPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Flux<Person> people = cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class);
StepVerifier.create(people).expectNextCount(1).verifyComplete();
}
/**
 * Creates a container for an autoscale-annotated entity and verifies the
 * provisioned autoscale max throughput matches the configured constant.
 */
@Test
public void createWithAutoscale() {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerResponse containerResponse = cosmosTemplate
.createContainerIfNotExists(autoScaleSampleInfo)
.block();
assertNotNull(containerResponse);
// Read back the throughput directly through the SDK client to verify it.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
/**
 * Creates a fresh database configured with manual (non-autoscale) throughput
 * and verifies the provisioned RU/s match the requested value.
 *
 * Fix: {@code readThroughput().block()} may return null; previously the result
 * was dereferenced directly, so a missing response surfaced as an NPE instead
 * of a clear assertion failure (sibling {@code createWithAutoscale} already
 * guards with assertNotNull).
 *
 * @throws ClassNotFoundException propagated from template creation (entity scanning)
 */
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-other";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final ReactiveCosmosTemplate configuredThroughputCosmosTemplate = createReactiveCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo).block();
// Verify through the raw SDK client that the database got the manual throughput.
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
assertNotNull(response);
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
/**
 * Builds a template with maxDegreeOfParallelism=20, runs a query through it,
 * and asserts (via reflection) the setting was propagated to the template.
 */
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final ReactiveCosmosTemplate maxDegreeOfParallelismCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxDegreeOfParallelismCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
// Reflection check: the private field must carry the configured value.
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
/**
 * Builds a template with maxBufferedItemCount=500, runs a query through it,
 * and asserts (via reflection) the setting was propagated to the template.
 */
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final ReactiveCosmosTemplate maxBufferedItemCountCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = maxBufferedItemCountCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
// Reflection check: the private field must carry the configured value.
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
/**
 * Builds a template with responseContinuationTokenLimitInKb=2000, runs a query
 * through it, and asserts (via reflection) the setting was propagated.
 */
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final ReactiveCosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = responseContinuationTokenLimitInKbCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
// Reflection check: the private field must carry the configured value.
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
/**
 * Builds a template with query metrics enabled, runs a query through it, and
 * asserts (via reflection) the flag was propagated.
 * NOTE(review): method name has a typo ("Mertics" -> "Metrics"); left as-is to
 * avoid churning test reports/history.
 */
@Test
public void queryWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final ReactiveCosmosTemplate queryMetricsEnabledCosmosTemplate = createReactiveCosmosTemplate(config, TestConstants.DB_NAME);
final AuditableEntity entity = new AuditableEntity();
entity.setId(UUID.randomUUID().toString());
auditableRepository.save(entity);
Criteria equals = Criteria.getInstance(CriteriaType.IS_EQUAL, "id", Collections.singletonList(entity.getId()), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(equals));
final Flux<AuditableEntity> flux = queryMetricsEnabledCosmosTemplate.runQuery(sqlQuerySpec, AuditableEntity.class, AuditableEntity.class);
StepVerifier.create(flux).expectNextCount(1).verifyComplete();
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
/**
 * Uses reflection to read the builder's private user-agent suffix and verifies
 * it carries the spring-data-cosmos marker and the project version.
 */
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
// getUserAgentSuffix is not public API on CosmosClientBuilder, hence reflection.
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Best-effort cleanup helper: deletes the given database, tolerating only a
 * 404 (database does not exist); any other Cosmos error fails the test.
 *
 * Fix: assertEquals arguments were reversed relative to the JUnit
 * (expected, actual) convention, producing a misleading failure message.
 *
 * @param dbName name of the database to drop
 */
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
// Only "not found" is acceptable; expected value goes first per JUnit.
assertEquals(404, ex.getStatusCode());
}
}
} |
class CosmosTemplateIT {
@Test
public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
// Seeded fixtures: TEST_PERSON is inserted in setUp; _2/_3 are inserted per-test as needed.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring of the SDK's precondition-failure message used for assertion matching.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-op patch shared by patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// NOTE(review): unlike `operations`, this is a non-static, non-final,
// package-private field — consider aligning with `operations` for consistency.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.replace("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.increment("/age", PATCH_AGE_INCREMENT)
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
// Creates/empties the test containers for the whole class run.
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once in setUp and shared across tests.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// Re-inserted fresh before every test; carries the server-assigned etag.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Explicit no-arg constructor; declares JsonProcessingException presumably for
// field initializers involving Jackson — TODO confirm the throws clause is still needed.
public CosmosTemplateIT() throws JsonProcessingException {
}
/**
 * Per-test setup: lazily builds the shared client/template on first run, then
 * empties the test containers and re-inserts the baseline TEST_PERSON.
 */
@Before
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Keep the returned entity: it carries the server-generated etag used by update tests.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
/**
 * Assembles a CosmosTemplate for the given config and database, scanning the
 * application context for persistent entities to seed the mapping context.
 */
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new CosmosTemplate(factory, config, converter);
}
/** Inserts the given person, deriving the partition key from its partition-key field. */
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
/**
 * Re-inserting the already-seeded TEST_PERSON must surface a
 * CosmosAccessException wrapping a ConflictException (HTTP 409).
 */
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
/**
 * Inserting an entity with a null id (and no auto-generate annotation) must
 * fail with CosmosAccessException.
 */
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
/**
 * Inserting a GenIdEntity with a null id must succeed, with the id
 * auto-generated on the returned entity.
 */
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
/**
 * findAll over the seeded container returns exactly the baseline TEST_PERSON,
 * and query diagnostics with a positive RU charge are recorded.
 */
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * findById returns the seeded person for a known id, and null (not an error)
 * for a non-existent id; diagnostics are captured in both cases.
 */
@Test
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Unknown id: expect a null result rather than an exception.
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
/**
 * Inserts two more people and verifies findByIds returns all three seeded
 * entities (order not asserted); diagnostics are captured for the query.
 */
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
/**
 * Deletes the seeded person, then upserts a brand-new document with the same
 * id and verifies the upsert returns the new entity (insert path of upsert).
 * NOTE(review): the new person's lastName is set to NEW_FIRST_NAME — looks
 * intentional for partition-key variation but worth confirming.
 */
@Test
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Random suffix guarantees the upserted value differs from anything prior.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
/**
 * Upserts an updated person carrying the current etag and verifies the
 * returned entity matches, including that its refreshed etag equals the one
 * read back from the store.
 */
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Carry over the current etag so the optimistic-concurrency check passes.
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
/**
 * Upserts an updated person with a valid etag (update path) and verifies the
 * returned entity and that diagnostics — but not query statistics — are set.
 */
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Valid etag taken from the entity inserted in setUp.
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
/**
 * Applies the multi-operation patch (replace firstName and passport map, add a
 * hobby, increment age) and verifies the returned entity reflects every
 * operation, and that the change was actually persisted.
 *
 * Fixes: a duplicated {@code @Test} annotation ({@code @Test} is not
 * repeatable, so this was a compile error), and {@code patchedPerson} being
 * read back from the store but never asserted on (dead variable).
 */
@Test
public void testPatchMultiOperations() {
insertedPerson = cosmosTemplate.patch(insertedPerson, multiPatchOperations, null);
assertEquals(insertedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(insertedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(insertedPerson.getFirstName(), PATCH_FIRST_NAME);
assertEquals(insertedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
// Verify the patch was persisted, not just reflected on the returned instance.
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(patchedPerson.getAge(), insertedPerson.getAge());
assertEquals(patchedPerson.getFirstName(), insertedPerson.getFirstName());
}
/**
 * Sets a filter predicate that cannot match the patched document so the patch
 * must fail with a precondition failure; if it unexpectedly succeeds, the
 * document must be unchanged and the test fails explicitly.
 */
@Test
public void testPatchPreConditionFail() {
try {
// Predicate matches no document with this lastName, forcing a 412.
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
insertedPerson = cosmosTemplate.patch(insertedPerson, operations,options);
Person patchedPerson = cosmosTemplate.findById(containerName, insertedPerson.getId(), Person.class);
assertEquals(insertedPerson.getAge(), patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
/**
 * Upserting with a deliberately wrong etag must trip optimistic concurrency:
 * the CosmosException message contains the precondition-not-met text and the
 * stored document remains unmodified.
 */
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The failed upsert must not have altered the stored document.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert succeeded despite the wrong etag.
fail();
}
/**
 * With two documents present, deleteById removes only the targeted one;
 * the survivor is TEST_PERSON_2 and diagnostics are captured for both ops.
 * NOTE(review): TEST_PERSON_2 is inserted with a null partition key here,
 * unlike sibling tests — confirm this is intentional.
 */
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
/**
 * With two documents present, deleteEntity removes the entity instance passed
 * in; the survivor is the baseline TEST_PERSON.
 */
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
/**
 * Container-level count reflects inserts: 1 before, 2 after adding
 * TEST_PERSON_2; diagnostics and RU charges are recorded for each count.
 */
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Criteria-based count: matches TEST_PERSON_2 by firstName exactly, and again
 * case-insensitively with an upper-cased value; both counts are 1.
 */
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Case-insensitive variant of the same predicate.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Pages through 2 documents with page size 1: first page is full and non-last,
 * second page holds the remaining document and is last.
 */
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Continuation: the next pageable drives the second (and final) page.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Pages through 3 documents with page size 2: first page carries TEST_PERSON
 * and TEST_PERSON_2, the last page carries TEST_PERSON_3.
 */
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * paginationQuery with an equality criteria on firstName returns a single-item
 * last page; repeated with IgnoreCaseType.ALWAYS on an upper-cased value.
 */
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
// Case-insensitive variant of the same paged query.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Inserts three same-lastName people, queries with ascending firstName sort
 * (verifying order), then re-runs with limit 1 and expects only the first.
 */
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Same query, now limited to a single result — must be the sort-first entry.
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
/**
 * Inserts three same-lastName people, verifies ascending sort order, then
 * applies offset 1 / limit 1 and expects the second-sorted entry ("fred").
 */
@Test
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Skip the first sorted entry and take one: expect "fred".
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
/**
 * findAll with a descending firstName sort on a single page of 3: both
 * NEW_FIRST_NAME entries come before FIRST_NAME.
 */
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies an ARRAY_CONTAINS criteria matches the document whose "hobbies" array holds HOBBY1.
@Test
public void testArrayContainsCriteria() {
    final Criteria arrayContainsHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(arrayContainsHobby);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// Verifies the CONTAINING criteria on firstName, case-sensitively and case-insensitively.
// The extra person deliberately carries the literal first name "NEW_FIRST_NAME" (all caps)
// so only the case-insensitive query matches it against the lower-case substring "first".
@Test
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Local renamed from TEST_PERSON_4: UPPER_SNAKE_CASE is reserved for constants,
    // and the old name suggested a static field like TEST_PERSON_2/3.
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
// Verifies case-sensitive CONTAINING on the "id" field selects exactly the document whose
// id contains the given digit ("1" -> TEST_PERSON, "2" -> TEST_PERSON_2, "3" -> TEST_PERSON_3).
@Test
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
// Verifies the NOT_CONTAINING criteria on firstName, case-sensitively ("li" excludes only
// TEST_PERSON) and case-insensitively ("new" excludes everyone except TEST_PERSON, including
// the extra person whose literal first name is "NEW_FIRST_NAME").
@Test
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Local renamed from TEST_PERSON_4: locals are lowerCamelCase; UPPER_SNAKE_CASE
    // falsely suggested a shared static constant.
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
// Verifies case-sensitive NOT_CONTAINING on "id" excludes only the document whose id
// contains the given digit, returning the other two.
@Test
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
// Verifies an IS_NOT_NULL criteria on lastName (ignore-case flag present but irrelevant
// for a null check) returns the only inserted person.
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    final Criteria lastNameNotNull = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(lastNameNotNull);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// Verifies a case-insensitive STARTS_WITH criteria matches the person even when the
// query value is the upper-cased first name.
@Test
public void testStartsWithCriteriaCaseSensitive() {
    final String upperCasedName = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria startsWithIgnoreCase = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(upperCasedName), Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(startsWithIgnoreCase), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// Verifies a case-insensitive IS_EQUAL criteria matches the person when the query value
// is the upper-cased first name.
@Test
public void testIsEqualCriteriaCaseSensitive() {
    // Renamed from "nameStartsWith" — a copy-paste leftover from the STARTS_WITH test;
    // this criteria tests equality, not a prefix match.
    Criteria nameEqualsIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEqualsIgnoreCase), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
// Verifies a case-insensitive STRING_EQUALS criteria matches the person when the query
// value is the upper-cased first name.
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // Renamed from "nameStartsWith" — a copy-paste leftover from the STARTS_WITH test;
    // this criteria performs a string-equality comparison.
    Criteria nameStringEqualsIgnoreCase = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStringEqualsIgnoreCase), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
// Verifies a BETWEEN criteria on age with an inclusive window of +/-1 around AGE.
@Test
public void testBetweenCriteria() {
    final List<Object> ageWindow = Arrays.asList(AGE - 1, AGE + 1);
    final Criteria ageWithinWindow = Criteria.getInstance(CriteriaType.BETWEEN, "age", ageWindow,
        Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ageWithinWindow), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// Verifies IS_EQUAL works against a nested, bracket-indexed property path
// ("shippingAddresses[0]['postalCode']").
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// Verifies runQuery handles a generated SqlQuerySpec whose property path contains spaces
// (a quoted map key: "passportIdsByCountry['United States of America']").
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
// Verifies runQuery with a generated BETWEEN SqlQuerySpec and identical domain/return types.
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
// Verifies sliceQuery returns a single-element slice for an IS_EQUAL criteria with a page
// request, and that diagnostics/response statistics are populated by the query.
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// The insert (point write) populates diagnostics only, not query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies runSliceQuery with a raw SqlQuerySpec generated from the same IS_EQUAL criteria,
// paged through an explicit PageRequest.
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies createContainerIfNotExists for an autoscale-annotated entity provisions the
// container with the configured autoscale max throughput, read back via the async client.
@Test
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
// Verifies database-level manual throughput: a template configured with
// enableDatabaseThroughput(false, 700) creates the database with that RU setting.
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    // Start from a clean slate so the throughput is set at creation time.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    // Renamed from "personInfo" — the old name shadowed the static personInfo field.
    final CosmosEntityInformation<Person, String> configuredDbPersonInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(configuredDbPersonInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Guard against an NPE masking the real failure if the read returns nothing.
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
// Verifies a template built with maxDegreeOfParallelism(20) carries that value through
// to its internal field (checked via reflection) after executing a query.
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the configured options are exercised; the count itself is irrelevant
    // (the previous unused "count" local has been dropped).
    maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first, actual second.
    assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
// Verifies a template built with maxBufferedItemCount(500) carries that value through
// to its internal field (checked via reflection) after executing a query.
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the configured options are exercised; the count itself is irrelevant
    // (the previous unused "count" local has been dropped).
    maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first, actual second.
    assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
// Verifies a template built with responseContinuationTokenLimitInKb(2000) carries that
// value through to its internal field (checked via reflection) after executing a query.
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the configured options are exercised; the count itself is irrelevant
    // (the previous unused "count" local has been dropped).
    responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first, actual second.
    assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"));
}
// Verifies a template built with enableQueryMetrics(true) carries that flag through to its
// internal field (checked via reflection) after executing a query.
// NOTE(review): the method name misspells "Metrics" as "Mertics"; left unchanged here to
// avoid renaming a public test method in this edit.
@Test
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Run a query so the configured options are exercised; the count itself is irrelevant
    // (the previous unused "count" local has been dropped).
    queryMetricsEnabledCosmosTemplate.count(query, containerName);
    // JUnit convention: expected value first, actual second.
    assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
// Verifies the client builder's user-agent suffix (read via reflection on the private
// getUserAgentSuffix accessor) contains the Spring Data Cosmos marker and project version.
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
// The accessor is not public on CosmosClientBuilder, hence the reflective access.
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort deletion of the named database; a 404 (database absent) is expected and
// tolerated, any other Cosmos error fails the test.
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // JUnit convention: expected value first, actual second (was reversed, which
        // produces misleading failure messages).
        assertEquals(404, ex.getStatusCode());
    }
}
}
class CosmosTemplateIT {
// Fixture people sharing hobbies/addresses/age; TEST_PERSON_2 and _3 share NEW_FIRST_NAME
// and NEW_LAST_NAME so multi-match queries can distinguish them from TEST_PERSON.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring of the SDK's precondition-failure message, matched in the optimistic-lock test.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the precondition tests: replaces /age.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one request.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared, mutated per-test via setFilterPredicate in the precondition tests.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once in setUp() and shared across all tests in the class.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The Person inserted fresh in setUp(); carries the server-assigned etag.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Declared throws is required by the checked-exception signature of field initializers' types.
public CosmosTemplateIT() throws JsonProcessingException {
}
// Per-test setup: lazily builds the shared client/template once, empties the managed
// containers, and inserts TEST_PERSON so every test starts from one known document.
@Before
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Keep the returned entity: it holds the etag needed by the update/optimistic-lock tests.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate for the given config and database, scanning the application
// context for @Persistent entities to seed the mapping context.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    final CosmosMappingContext context = new CosmosMappingContext();
    final EntityScanner scanner = new EntityScanner(this.applicationContext);
    context.setInitialEntitySet(scanner.scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    return new CosmosTemplate(factory, config, converter);
}
// Inserts the given person under its own partition-key value.
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
// Re-inserting the document created in setUp() must surface the SDK ConflictException
// wrapped in CosmosAccessException, with diagnostics recorded.
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// A null id on an entity without auto-generation must be rejected.
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
// A null id on a @GeneratedValue-style entity must be filled in by the template.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
// findAll over the container returns exactly the single setUp() document and records
// query diagnostics plus response statistics.
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// findById returns the stored document for a known id and null for an unknown one;
// diagnostics are recorded in both cases.
@Test
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// findByIds with three known ids returns all three people (order not asserted).
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
// Upsert after deleting the setUp() document acts as an insert: the returned entity
// carries the new (randomized) first name; an upsert is a point write, so no query
// response statistics are produced.
@Test
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
// An upsert with the current etag succeeds as an update, and the etag returned by
// upsertAndReturnEntity matches what a subsequent point read sees.
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Use the etag captured at insert time so the optimistic-concurrency check passes.
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
// Same update path as above, asserting only on the returned entity and diagnostics.
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
// Applies the multi-operation patch (set/replace/add/remove/increment) and verifies each
// field it touches. NOTE: the original carried a duplicate @Test annotation, which does not
// compile under JUnit 4 (@Test is not a repeatable annotation) — removed.
@Test
public void testPatchMultiOperations() {
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
    // JUnit convention: expected value first, actual second.
    assertEquals((AGE + PATCH_AGE_INCREMENT), patchedPerson.getAge().intValue());
    assertEquals(PATCH_HOBBIES, patchedPerson.getHobbies());
    assertEquals(PATCH_FIRST_NAME, patchedPerson.getFirstName());
    // The patch removed /shippingAddresses/1, leaving one address.
    assertEquals(1, patchedPerson.getShippingAddresses().size());
    assertEquals(NEW_PASSPORT_IDS_BY_COUNTRY, patchedPerson.getPassportIdsByCountry());
}
// A patch whose filter predicate matches the document's lastName is applied.
@Test
public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
// A patch whose filter predicate does not match must fail with PreconditionFailedException
// (wrapped in CosmosAccessException), leaving diagnostics recorded.
@Test
public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// Upserting with a stale/wrong etag must fail the optimistic-concurrency check: the SDK
// raises a precondition failure and the stored document is left unmodified.
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
// Match on the SDK's "precondition is not met" message fragment.
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The failed upsert must not have changed the stored document.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert unexpectedly succeeded.
fail();
}
// deleteById removes only the targeted document; the other inserted person remains.
// Deletes are point writes (diagnostics only); the follow-up findAll produces statistics.
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
// deleteEntity removes the entity inserted in this test; the setUp() document remains.
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
// count(containerName) reflects the container's document total before (1) and after (2)
// an extra insert; count is a query, so response statistics are populated each time.
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies count(query) for an equality criterion, both case-sensitive
// (IgnoreCaseType.NEVER) and case-insensitive (ALWAYS with upper-cased value).
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Insert is a point operation: diagnostics present, query statistics absent.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Same match is expected when comparing case-insensitively against the
// upper-cased first name.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies paging across two pages with page size 1: the first page carries a
// continuation token (non-last), the second is the last page.
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Two documents exist; request the first page of size 1.
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// nextPageable() carries the continuation token for the second (final) page.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies paging with page size 2 over three documents: page 1 holds two
// persons, page 2 holds the remaining one and is the last page.
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
// No sort was requested, so membership (containsAll) is asserted rather
// than a specific order.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies paginationQuery with an equality criterion; only one of the two
// documents matches FIRST_NAME, so the single page is also the last page.
// Exercises both case-sensitive and case-insensitive matching.
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
// Same single match expected when comparing case-insensitively.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies ascending sort on firstName plus TOP-style limiting: with limit 1
// only the alphabetically first match ("barney") is returned.
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
// All three extra persons share NEW_LAST_NAME, so the criterion matches
// exactly them (not the person inserted by setUp).
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Re-running the same sorted query with limit 1 keeps only the first row.
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
// Verifies OFFSET/LIMIT on a sorted query: offset 1, limit 1 returns the
// second match in ascending firstName order ("fred").
@Test
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
// Baseline: all three matches, sorted ascending by firstName.
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Skip "barney", take one: expect "fred".
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
// Verifies findAll with a page request carrying a DESC sort on firstName:
// the two NEW_FIRST_NAME documents precede the FIRST_NAME one.
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
// PAGE_SIZE_3 fits all three documents, so a single (last) page results.
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies that an ASC firstName sort is maintained ACROSS page boundaries:
// with five documents and page size 3, the first page holds the three
// alphabetically-smallest names and the second page the remaining two.
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
// Page 1: "barney", FIRST_NAME, "fred" — sorted ascending.
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
// Page 2: the two NEW_FIRST_NAME documents, continuing the global order.
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
// Verifies exists(query) returns true for a matching equality criterion,
// both case-sensitively and case-insensitively (upper-cased value).
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
// exists is implemented as a query, so RU statistics must be populated.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// ARRAY_CONTAINS on the "hobbies" array should match only the person
// inserted by setUp(), whose hobbies include HOBBY1.
public void testArrayContainsCriteria() {
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery hobbyQuery = new CosmosQuery(hobbyCriteria);
    final List<Person> matches =
        TestUtils.toList(cosmosTemplate.find(hobbyQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies CONTAINING on firstName: "first" matches the three fixture persons
// case-sensitively, and additionally testPerson4 ("NEW_FIRST_NAME") when the
// comparison ignores case.
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Fix: local variables use lowerCamelCase; UPPER_SNAKE_CASE is reserved
    // for constants (original declared a local named TEST_PERSON_4).
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
// Verifies case-sensitive CONTAINING against the "id" field: each digit
// fragment matches exactly the person whose id contains it.
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final String[] fragments = {"1", "2", "3"};
    final Person[] expectedMatches = {TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3};
    for (int i = 0; i < fragments.length; i++) {
        final Criteria idContains = Criteria.getInstance(CriteriaType.CONTAINING, "id",
            Collections.singletonList(fragments[i]), Part.IgnoreCaseType.NEVER);
        final List<Person> matched = TestUtils.toList(
            cosmosTemplate.find(new CosmosQuery(idContains), Person.class, containerName));
        assertThat(matched).containsExactly(expectedMatches[i]);
    }
}
@Test
// Verifies NOT_CONTAINING on firstName: case-sensitively "li" excludes only
// TEST_PERSON; ignoring case, "new" excludes everyone except TEST_PERSON.
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Fix: local variables use lowerCamelCase; UPPER_SNAKE_CASE is reserved
    // for constants (original declared a local named TEST_PERSON_4).
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
// Verifies case-sensitive NOT_CONTAINING against the "id" field: excluding
// one digit fragment leaves exactly the other two persons.
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final String[] fragments = {"1", "2", "3"};
    final Person[][] expectedRemainders = {
        {TEST_PERSON_2, TEST_PERSON_3},
        {TEST_PERSON, TEST_PERSON_3},
        {TEST_PERSON, TEST_PERSON_2},
    };
    for (int i = 0; i < fragments.length; i++) {
        final Criteria idNotContains = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
            Collections.singletonList(fragments[i]), Part.IgnoreCaseType.NEVER);
        final List<Person> matched = TestUtils.toList(
            cosmosTemplate.find(new CosmosQuery(idNotContains), Person.class, containerName));
        assertThat(matched).containsExactly(expectedRemainders[i]);
    }
}
@Test
// IS_NOT_NULL on lastName (no values needed) should match the single person
// present after setUp().
public void testIsNotNullCriteriaCaseSensitive() {
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(), Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(lastNamePresent), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// STARTS_WITH with IgnoreCaseType.ALWAYS: the upper-cased first name should
// still match the fixture person.
public void testStartsWithCriteriaCaseSensitive() {
    final String upperCasedName = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria startsWithIgnoringCase = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(upperCasedName), Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(startsWithIgnoringCase), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// IS_EQUAL with IgnoreCaseType.ALWAYS: equality against the upper-cased
// first name should still match the fixture person.
public void testIsEqualCriteriaCaseSensitive() {
    final String upperCasedName = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria equalsIgnoringCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(upperCasedName), Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(equalsIgnoringCase), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// STRING_EQUALS with IgnoreCaseType.ALWAYS: the upper-cased first name should
// still match the fixture person.
public void testStringEqualsCriteriaCaseSensitive() {
    final String upperCasedName = TEST_PERSON.getFirstName().toUpperCase();
    final Criteria stringEqualsIgnoringCase = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(upperCasedName), Part.IgnoreCaseType.ALWAYS);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(stringEqualsIgnoringCase), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// BETWEEN on the numeric "age" field: the inclusive range [AGE-1, AGE+1]
// must match the fixture person whose age is AGE.
public void testBetweenCriteria() {
    final List<Object> ageBounds = Arrays.asList(AGE - 1, AGE + 1);
    final Criteria ageInRange =
        Criteria.getInstance(CriteriaType.BETWEEN, "age", ageBounds, Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(ageInRange), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies an equality criterion whose subject is a nested/indexed property
// path (first shipping address' postal code).
public void testFindWithEqualCriteriaContainingNestedProperty() {
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedSubject = "shippingAddresses[0]['postalCode']";
    final Criteria postalCodeEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedSubject,
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    final List<Person> matches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(postalCodeEquals), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies runQuery with a generated SqlQuerySpec whose criterion subject is
// a map key containing spaces ("United States of America").
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    final String subjectWithSpaces = "passportIdsByCountry['United States of America']";
    final Criteria passportEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
        Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec spec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(passportEquals));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(spec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// Verifies runQuery with a generated SqlQuerySpec over a BETWEEN criterion,
// using the same domain type for both query and return type parameters.
public void testRunQueryWithSimpleReturnType() {
    final Criteria ageInRange = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec spec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageInRange));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(spec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// Verifies sliceQuery with an equality criterion and page request: exactly one
// of the two documents matches FIRST_NAME.
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Insert is a point operation: diagnostics present, query statistics absent.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
// The slice query populates RU statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies runSliceQuery with a pre-generated SqlQuerySpec and a page request;
// behavior mirrors testSliceQuery but goes through the raw-spec entry point.
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
// Only TEST_PERSON carries FIRST_NAME, so a single-element slice is expected.
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Verifies that a container created from an entity annotated for autoscale
// reports the configured autoscale max throughput when read back directly
// through the async client.
@Test
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
// block() may yield null; guard before dereferencing the response.
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Verifies that enableDatabaseThroughput(false, N) provisions the database
// with N manual RUs, by reading throughput back through the async client.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    // Start from a clean slate so the configured throughput is actually applied.
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // Fix: block() may return null; guard before dereferencing (consistent
    // with the assertNotNull check in createWithAutoscale).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Verifies CosmosConfig.maxDegreeOfParallelism(20) is propagated into the
// template (checked via reflection since the field is private).
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count value is incidental; it exercises the query path on the
    // freshly configured template.
    final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // Fix: assertEquals takes (expected, actual); original had them reversed.
    assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
@Test
// Verifies CosmosConfig.maxBufferedItemCount(500) is propagated into the
// template (checked via reflection since the field is private).
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count value is incidental; it exercises the query path.
    final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // Fix: assertEquals takes (expected, actual); original had them reversed.
    assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
@Test
// Verifies CosmosConfig.responseContinuationTokenLimitInKb(2000) is propagated
// into the template (checked via reflection since the field is private).
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count value is incidental; it exercises the query path.
    final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // Fix: assertEquals takes (expected, actual); original had them reversed.
    assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"));
}
@Test
// Verifies CosmosConfig.enableQueryMetrics(true) is propagated into the
// template (checked via reflection since the field is private).
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept as-is
// to avoid changing the block's public name.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // The count value is incidental; it exercises the query path.
    final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
    // Fix: assertEquals takes (expected, actual); original had them reversed.
    assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
// Verifies the client builder's user-agent suffix contains the Spring Data
// Cosmos marker and the project version. The getter is private on
// CosmosClientBuilder, so it is invoked reflectively.
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
// Required: the method is not public.
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup: drops the given database if present. A 404 from the
// service means the database did not exist, which is acceptable; any other
// status code fails the test.
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // Fix: assertEquals takes (expected, actual); original had them
        // reversed, which produces misleading failure messages.
        assertEquals(404, ex.getStatusCode());
    }
}
} |
Corrected this | public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), patchedPerson.getAge());
} | assertEquals(patchedPerson.getAge(), patchedPerson.getAge()); | public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
} | class CosmosTemplateIT {
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// NOTE(review): the declared JsonProcessingException does not appear to be thrown by
// anything visible here (field initializers use convertValue) — possibly vestigial; confirm.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
public void setUp() throws ClassNotFoundException {
// One-time lazy initialization of the shared static client/template for the class.
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
// Reset containers to empty before every test so counts/queries start from a known state.
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// Seed exactly one person; the returned entity carries the server-assigned etag.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
/**
 * Builds a CosmosTemplate against the given database, wiring a mapping context
 * scanned from this test's application context.
 */
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    final CosmosFactory factory = new CosmosFactory(client, dbName);
    return new CosmosTemplate(factory, config, converter);
}
/** Inserts the person using its entity-derived partition key. */
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
// TEST_PERSON was already seeded in setUp(); re-inserting the same id must conflict.
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
// Diagnostics must have been captured for the failed insert call.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
    // Person's id is not auto-generated, so inserting with a null id must be rejected.
    final Person personWithoutId =
        new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(Person.class.getSimpleName(), personWithoutId,
        new PartitionKey(personWithoutId.getLastName()));
}
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
    // GenIdEntity's id is annotated for auto-generation; a null id must be filled in.
    final GenIdEntity toInsert = new GenIdEntity(null, "foo");
    final GenIdEntity saved = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(), toInsert, null);
    assertThat(saved.getId()).isNotNull();
}
@Test
public void testFindAll() {
// Only the person seeded in setUp() should be present.
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
// Query operations record both diagnostics and response statistics with a positive RU charge.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindById() {
// An existing id returns the seeded person plus query diagnostics/statistics.
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// A missing id yields null (no exception), but diagnostics are still recorded.
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
public void testFindByMultiIds() {
// Add two more people so that three distinct ids exist.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Order is not asserted — only membership and size.
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
public void testUpsertNewDocument() {
// Delete the seeded person first so the upsert acts as a create, not a replace.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Random suffix guarantees the name differs from anything previously stored.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
// Point writes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
public void testUpdateWithReturnEntity() {
    // Same id/partition key as the seeded person, but a new first name.
    final Person toUpdate = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    // Carry over the current etag so the optimistic-concurrency check passes.
    toUpdate.set_etag(insertedPerson.get_etag());
    final Person returned = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), toUpdate);
    final Person reloaded = cosmosTemplate.findById(Person.class.getSimpleName(),
        returned.getId(), Person.class);
    assertEquals(returned, toUpdate);
    // The returned entity must carry the fresh etag now persisted in the store.
    assertThat(returned.get_etag()).isEqualTo(reloaded.get_etag());
}
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Use the etag from the seeded insert so the optimistic-concurrency check passes.
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
// Point writes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
public void testPatch() {
    // Apply the shared age-replace patch and verify the returned entity reflects it.
    final PartitionKey partitionKey = new PartitionKey(insertedPerson.getLastName());
    final Person patched =
        cosmosTemplate.patch(insertedPerson.getId(), partitionKey, Person.class, operations);
    assertEquals(patched.getAge(), PATCH_AGE_1);
}
@Test
public void testPatchMultiOperations() {
    // Apply set/replace/add/remove/increment in a single patch and check each effect.
    final PartitionKey partitionKey = new PartitionKey(insertedPerson.getLastName());
    final Person patched =
        cosmosTemplate.patch(insertedPerson.getId(), partitionKey, Person.class, multiPatchOperations);
    assertEquals(patched.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
    assertEquals(patched.getHobbies(), PATCH_HOBBIES);
    assertEquals(patched.getFirstName(), PATCH_FIRST_NAME);
    assertEquals(patched.getShippingAddresses().size(), 1);
    assertEquals(patched.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
public void testPatchPreConditionFail() {
    // A filter predicate that matches no stored document must make the patch fail
    // with a precondition-failed error surfaced as CosmosAccessException.
    // NOTE(review): `options` is a shared static; each test sets its own predicate first.
    try {
        options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
        cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
        fail();
    } catch (CosmosAccessException ex) {
        assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
        assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    }
}
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// A stale/incorrect etag must make the upsert fail the optimistic-concurrency check.
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
// The service's 412 message contains the "is not met" precondition phrase.
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The stored document must be unchanged by the rejected upsert.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
// Reaching here means the upsert unexpectedly succeeded.
fail();
}
@Test
public void testDeleteById() {
// Add a second person so the delete leaves exactly one behind.
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Point deletes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
public void testDeleteByEntity() {
// Insert a second person and delete it by entity (id + etag taken from the entity).
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
// Point deletes record diagnostics but no query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
@Test
public void testCountByContainer() {
// Baseline: exactly the one person seeded in setUp().
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
// The count must reflect the new insert.
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// Insert is a point write: diagnostics present, no query statistics yet.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive equality on firstName matches only TEST_PERSON_2.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
// Case-insensitive equality with an upper-cased needle still matches it.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPages() {
// Two documents total, page size 1 => two pages.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
// First page is full and carries a continuation to the next one.
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Follow the continuation token to fetch the final page.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
// Three documents total, page size 2 => a full page then a partial last page.
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// The second page holds only the remaining person and is marked last.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// Case-sensitive firstName filter matches only the seeded TEST_PERSON.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
// Case-insensitive variant with an upper-cased needle matches the same person.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindWithSortAndLimit() {
    // Three extra people sharing NEW_LAST_NAME, inserted out of alphabetical order.
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    // Sorted ascending by first name: barney, fred, george.
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Re-run the same sorted query limited to a single result.
    query.withLimit(1);
    final List<Person> limited = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(limited.size()).isEqualTo(1);
    assertThat(limited.get(0).getFirstName()).isEqualTo("barney");
}
@Test
public void testFindWithOffsetAndLimit() {
    // Three extra people sharing NEW_LAST_NAME, inserted out of alphabetical order.
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    // Sorted ascending by first name: barney, fred, george.
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Skip the first result and take one: expect the middle name ("fred").
    query.withOffsetAndLimit(1, 1);
    final List<Person> window = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(window.size()).isEqualTo(1);
    assertThat(window.get(0).getFirstName()).isEqualTo("fred");
}
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// All three fit on one page, ordered by firstName descending.
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
// NEW_FIRST_NAME (persons 2 and 3) sorts before FIRST_NAME in descending order.
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
    // Five documents total, page size 3 => the ascending firstName order must hold
    // across the page boundary: barney, First, fred | New, New.
    final Person personBarney = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person personFred = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(TEST_PERSON_2);
    insertPerson(TEST_PERSON_3);
    insertPerson(personBarney);
    insertPerson(personFred);
    final Sort ascendingByFirstName = Sort.by(Sort.Direction.ASC, "firstName");
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, ascendingByFirstName);
    final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
        containerName);
    assertThat(firstPage.getContent().size()).isEqualTo(3);
    PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
    final List<Person> firstPageResults = firstPage.getContent();
    assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(personBarney.getFirstName());
    assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
    assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(personFred.getFirstName());
    // Second (last) page carries the two remaining NEW_FIRST_NAME persons.
    final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
        containerName);
    assertThat(secondPage.getContent().size()).isEqualTo(2);
    PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
    final List<Person> secondPageResults = secondPage.getContent();
    assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
    assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
public void testExists() {
// Case-sensitive match on the seeded person's exact first name.
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
// Case-insensitive match with the upper-cased name must also succeed.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testArrayContainsCriteria() {
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testContainsCriteria() {
    insertPerson(TEST_PERSON_2);
    insertPerson(TEST_PERSON_3);
    // A fourth person whose first name is the upper-case literal "NEW_FIRST_NAME".
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(testPerson4);
    // Case-sensitive CONTAINING "first" excludes the all-caps fourth person.
    final Criteria caseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    final List<Person> caseSensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
    assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    // Case-insensitive CONTAINING "first" matches all four.
    final Criteria caseInsensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    final List<Person> caseInsensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseInsensitive), Person.class, containerName));
    assertThat(caseInsensitiveMatches).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
public void testContainsCriteria2() {
    insertPerson(TEST_PERSON_2);
    insertPerson(TEST_PERSON_3);
    // Each id ends in a distinct digit, so CONTAINING on "id" isolates one person per digit.
    final Criteria idContainsOne = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesOne = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsOne), Person.class, containerName));
    assertThat(matchesOne).containsExactly(TEST_PERSON);
    final Criteria idContainsTwo = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesTwo = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsTwo), Person.class, containerName));
    assertThat(matchesTwo).containsExactly(TEST_PERSON_2);
    final Criteria idContainsThree = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesThree = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idContainsThree), Person.class, containerName));
    assertThat(matchesThree).containsExactly(TEST_PERSON_3);
}
@Test
public void testNotContainsCriteria() {
    insertPerson(TEST_PERSON_2);
    insertPerson(TEST_PERSON_3);
    // A fourth person whose first name is the upper-case literal "NEW_FIRST_NAME".
    final Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(testPerson4);
    // Case-sensitive NOT_CONTAINING "li" excludes only the seeded person.
    final Criteria caseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    final List<Person> caseSensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseSensitive), Person.class, containerName));
    assertThat(caseSensitiveMatches).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    // Case-insensitive NOT_CONTAINING "new" leaves only the seeded person.
    final Criteria caseInsensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    final List<Person> caseInsensitiveMatches = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(caseInsensitive), Person.class, containerName));
    assertThat(caseInsensitiveMatches).containsExactly(TEST_PERSON);
}
@Test
public void testNotContainsCriteria2() {
    insertPerson(TEST_PERSON_2);
    insertPerson(TEST_PERSON_3);
    // Each id ends in a distinct digit; NOT_CONTAINING on "id" excludes exactly one person.
    final Criteria idWithoutOne = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutOne = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutOne), Person.class, containerName));
    assertThat(withoutOne).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
    final Criteria idWithoutTwo = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutTwo = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutTwo), Person.class, containerName));
    assertThat(withoutTwo).containsExactly(TEST_PERSON, TEST_PERSON_3);
    final Criteria idWithoutThree = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutThree = TestUtils.toList(
        cosmosTemplate.find(new CosmosQuery(idWithoutThree), Person.class, containerName));
    assertThat(withoutThree).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    // IS_NOT_NULL takes no values; the seeded person has a last name, so it matches.
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(lastNamePresent);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStartsWithCriteriaCaseSensitive() {
    // Case-insensitive STARTS_WITH using the upper-cased name must still match.
    final Criteria namePrefix = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(namePrefix);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testIsEqualCriteriaCaseSensitive() {
    // Case-insensitive IS_EQUAL with the upper-cased name must still match.
    final Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(nameEquals);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // Case-insensitive STRING_EQUALS with the upper-cased name must still match.
    final Criteria nameEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(nameEquals);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
@Test
// BETWEEN criteria on a numeric field: AGE lies inside the [AGE-1, AGE+1] range.
public void testBetweenCriteria() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Equality against a nested property path (array index + bracketed key) must be honored
// by the query generator rather than treated as a flat field name.
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// A map key containing spaces must survive SQL spec generation and runQuery execution.
public void testRunQueryWithEqualCriteriaContainingSpaces() {
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// runQuery with identical domain and return types should deserialize straight to Person.
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// sliceQuery with a page request limiting results to one item, and verifies that
// response diagnostics/statistics are populated by the query (they are null right
// after the insert, which does not produce response statistics).
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
// Only TEST_PERSON matches FIRST_NAME; TEST_PERSON_2 has NEW_FIRST_NAME.
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// runSliceQuery with a raw SqlQuerySpec + page request; mirrors testSliceQuery but goes
// through the spec-based API instead of the CosmosQuery-based one.
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Creates a container for an autoscale-annotated entity and verifies the configured
// autoscale max throughput is reflected on the server-side container.
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Creates a database with fixed (manual) throughput and verifies the configured RU/s
// value is reflected server-side. Uses a dedicated database name so the shared test
// database is not affected; deletes any leftover instance first for idempotency.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
// Guard against an NPE masking the real failure if readThroughput() yields nothing
// (same pattern as createWithAutoscale above).
assertNotNull(response);
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Verifies that the maxDegreeOfParallelism configured on CosmosConfig is propagated
// into the CosmosTemplate (checked via reflection; the count() call merely exercises
// the query path and its result is intentionally unused).
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
@Test
// Verifies that the maxBufferedItemCount configured on CosmosConfig is propagated
// into the CosmosTemplate (checked via reflection; count() just exercises the query path).
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
@Test
// Verifies that responseContinuationTokenLimitInKb configured on CosmosConfig is
// propagated into the CosmosTemplate (checked via reflection).
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"));
}
@Test
// Verifies that enableQueryMetrics(true) is propagated into the CosmosTemplate.
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept as-is since
// renaming a public test method is out of scope here.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
@Test
// Verifies the spring-data user-agent suffix (marker + project version) is set on the
// client builder. Uses reflection because getUserAgentSuffix() is not public API.
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup: deletes the named database, tolerating (only) a 404 when the
// database does not exist. Any other Cosmos error fails the assertion.
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
// JUnit assertEquals takes (expected, actual); expected status goes first.
assertEquals(404, ex.getStatusCode());
}
}
} | class CosmosTemplateIT {
// Three fixture persons sharing hobbies/addresses/age; TEST_PERSON has FIRST_NAME,
// the other two share NEW_FIRST_NAME/NEW_LAST_NAME.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fragment of the message Cosmos returns on an etag precondition failure.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the simple patch tests.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch covering set/replace/add/remove/increment in one request.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared mutable request options; testPatchPreConditionFail sets a filter predicate on it.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Lazily initialized once in setUp() and shared across all tests in this class.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The Person inserted fresh by setUp() before each test (carries a valid etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Declared only to surface JsonProcessingException from static field initialization.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
// Lazily builds the shared client/template on first run, ensures the test containers
// exist and are empty, then seeds a single TEST_PERSON whose returned entity (with a
// valid etag) is kept for optimistic-concurrency tests.
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate against the shared async client for the given database,
// scanning the application context for @Persistent entities.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory factory = new CosmosFactory(client, dbName);
final CosmosMappingContext context = new CosmosMappingContext();
context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
return new CosmosTemplate(factory, config, new MappingCosmosConverter(context, null));
}
// Inserts the given person using its entity-derived partition key value.
private void insertPerson(Person person) {
final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
cosmosTemplate.insert(person, partitionKey);
}
@Test
// Re-inserting the person seeded by setUp() must fail with a 409 Conflict, surfaced
// as a CosmosAccessException wrapping ConflictException.
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
// Inserting with a null id must fail because Person's id field is not auto-generated.
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
// GenIdEntity has an auto-generated id, so inserting with a null id should succeed
// and the returned entity should carry a generated id.
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
// findAll returns exactly the single person seeded by setUp(), and the query
// populates diagnostics and request-charge statistics.
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findById returns the seeded person for a known id and null for an unknown id;
// both calls should populate diagnostics.
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(TEST_PERSON, result);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findByIds with three ids returns all three persons (order not guaranteed, hence
// size + containsAll rather than an ordered comparison).
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
// Upserting after deleting the seeded person acts as an insert; the randomized first
// name ensures the returned entity is the new document, not a stale one.
public void testUpsertNewDocument() {
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(firstName, person.getFirstName());
}
@Test
// Upsert with a valid etag updates the document; the returned entity's etag must
// match the etag visible via a subsequent findById.
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(updated, updatedPerson);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
// Upsert with the etag from setUp() succeeds and returns the updated entity;
// the upsert itself produces diagnostics but no response statistics.
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(updated, person);
}
@Test
// The single replace operation should update the person's age to PATCH_AGE_1.
public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(PATCH_AGE_1, patchedPerson.getAge());
}
@Test
// Applies the combined set/replace/add/remove/increment patch and verifies each
// affected field. JUnit assertEquals takes (expected, actual); expected goes first.
public void testPatchMultiOperations() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals((AGE + PATCH_AGE_INCREMENT), patchedPerson.getAge().intValue());
assertEquals(PATCH_HOBBIES, patchedPerson.getHobbies());
assertEquals(PATCH_FIRST_NAME, patchedPerson.getFirstName());
// The remove of /shippingAddresses/1 leaves exactly one address.
assertEquals(1, patchedPerson.getShippingAddresses().size());
assertEquals(NEW_PASSPORT_IDS_BY_COUNTRY, patchedPerson.getPassportIdsByCountry());
}
@Test
// A filter predicate that matches nothing must make the patch fail with a 412
// precondition failure, surfaced as CosmosAccessException(PreconditionFailedException).
// NOTE: the duplicate @Test annotation was removed — @Test is not repeatable, so a
// second occurrence is a compile error.
public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
// Unreachable on the expected path; expected value goes first in assertEquals.
assertEquals(PATCH_AGE_1, patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test
// Upserting with a deliberately wrong etag must fail with a precondition error, and
// the stored document must remain unmodified. The early return inside the catch makes
// the trailing fail() reachable only when no exception was thrown.
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The failed upsert must not have changed the stored document.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
fail();
}
@Test
// Deleting the seeded person by id leaves only TEST_PERSON_2 in the container.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(TEST_PERSON_2, result.get(0));
}
@Test
// Deleting by entity (uses the entity's etag/partition key) leaves only TEST_PERSON.
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
// JUnit assertEquals takes (expected, actual); expected value goes first.
assertEquals(TEST_PERSON, result.get(0));
}
@Test
// count(container) reflects inserts: 1 after setUp(), 2 after inserting TEST_PERSON_2.
// Each count query populates diagnostics and request-charge statistics.
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// count(query) with an IS_EQUAL criterion matches exactly one person, both with a
// case-sensitive value and with an upper-cased value under ALWAYS ignore-case.
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Two documents, page size 1: first page is full and non-last, second page holds the
// remaining document and is the last page.
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Continuation: the second request resumes from page1's continuation token.
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Three documents, page size 2: first page holds two (unordered, so containsAll),
// second page holds the remaining one and is the last page.
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// paginationQuery with an IS_EQUAL criterion returns a single-item last page, both
// case-sensitively and with ALWAYS ignore-case on an upper-cased value.
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Ascending sort on firstName yields barney < fred < george; re-running the same
// query with a limit of 1 returns only the first of the sorted results.
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
@Test
// Same sorted data set as testFindWithSortAndLimit; offset 1 + limit 1 must skip
// "barney" and return only "fred".
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
@Test
// findAll with a DESC sort on firstName: the two NEW_FIRST_NAME persons come first,
// then TEST_PERSON; all three fit on a single (last) page.
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Verifies ASC sort order is preserved across a two-page findAll
// (5 entities, page size 3 -> pages of 3 and 2).
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
// Ascending: "barney" < FIRST_NAME < "fred" on the first page.
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
// Second page is fetched via the continuation carried in nextPageable().
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
@Test
// Verifies exists() for both case-sensitive and case-insensitive equality criteria,
// and that an exists query records diagnostics and a positive request charge.
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
// Upper-cased value only matches when IgnoreCaseType.ALWAYS is used.
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Verifies ARRAY_CONTAINS matches an element of the hobbies array.
public void testArrayContainsCriteria() {
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies CONTAINING substring matching in both case-sensitive and
// case-insensitive modes. The person with the literal upper-case first name
// "NEW_FIRST_NAME" is only matched when IgnoreCaseType.ALWAYS is used.
public void testContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Local (not a constant) — lowerCamelCase per Java convention.
Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
containerName));
// Case-insensitive match now also picks up the upper-cased fourth person.
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
@Test
// Verifies CONTAINING works against the document id field: each of the three
// ids (ID_1/ID_2/ID_3 — presumably containing "1"/"2"/"3"; TODO confirm against
// TestConstants) matches exactly one person.
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
@Test
// Verifies NOT_CONTAINING excludes matching documents, case-sensitively and
// case-insensitively.
public void testNotContainsCriteria() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
// Local (not a constant) — lowerCamelCase per Java convention.
Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
containerName));
// Only TEST_PERSON's first name lacks "new" when compared case-insensitively.
assertThat(people2).containsExactly(TEST_PERSON);
}
@Test
// Verifies NOT_CONTAINING against the id field: excluding "1"/"2"/"3" drops
// exactly the corresponding person each time.
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
@Test
// Verifies IS_NOT_NULL (value list is empty; IgnoreCaseType is irrelevant for
// null checks but exercised here with ALWAYS).
public void testIsNotNullCriteriaCaseSensitive() {
Criteria hasLastName = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasLastName), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies STARTS_WITH matches case-insensitively when ALWAYS is set
// (the probe value is upper-cased on purpose).
public void testStartsWithCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies IS_EQUAL matches case-insensitively when IgnoreCaseType.ALWAYS is
// set (the probe value is upper-cased on purpose).
public void testIsEqualCriteriaCaseSensitive() {
// Renamed from copy-pasted "nameStartsWith": this is an equality criterion.
Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies STRING_EQUALS matches case-insensitively when IgnoreCaseType.ALWAYS
// is set (the probe value is upper-cased on purpose).
public void testStringEqualsCriteriaCaseSensitive() {
// Renamed from copy-pasted "nameStartsWith": this is a string-equality criterion.
Criteria nameEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies BETWEEN with an inclusive [AGE-1, AGE+1] range.
public void testBetweenCriteria() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies criteria subjects may address nested/indexed properties
// (bracket-path syntax into the shippingAddresses array).
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies a generated SqlQuerySpec handles map keys containing spaces.
public void testRunQueryWithEqualCriteriaContainingSpaces() {
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies runQuery with a generated SqlQuerySpec and identical domain/return types.
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
// Verifies sliceQuery returns a Slice limited by the page request and records
// query statistics with a positive request charge.
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
// A point write populates diagnostics but not query statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Same as testSliceQuery but driving runSliceQuery with a raw SqlQuerySpec.
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// Verifies that a container created for an autoscale-annotated entity reports
// the configured autoscale max throughput when read back through the client.
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
// Verifies that enabling database-level throughput on CosmosConfig provisions
// the database with the configured manual RU/s.
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
// Start from a clean slate so the throughput is provisioned by this test.
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
// Renamed from "personInfo" to avoid shadowing the static field of the same name.
final CosmosEntityInformation<Person, String> personEntityInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personEntityInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
// Guard against an NPE masking the real failure (consistent with createWithAutoscale).
assertNotNull(response);
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
// Verifies CosmosConfig.maxDegreeOfParallelism is propagated into the template.
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query so the configured option is exercised; the count itself is irrelevant
// (previously stored in an unused local).
maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
// Verifies CosmosConfig.maxBufferedItemCount is propagated into the template.
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query so the configured option is exercised; the count itself is irrelevant
// (previously stored in an unused local).
maxBufferedItemCountCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
// Verifies CosmosConfig.responseContinuationTokenLimitInKb is propagated into the template.
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query so the configured option is exercised; the count itself is irrelevant
// (previously stored in an unused local).
responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
// Verifies CosmosConfig.enableQueryMetrics is propagated into the template.
// NOTE(review): method name has a typo ("Mertics" -> "Metrics"); kept as-is to
// avoid churning test reports/CI filters — rename in a dedicated change.
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
// Run a query so the configured option is exercised; the count itself is irrelevant
// (previously stored in an unused local).
queryMetricsEnabledCosmosTemplate.count(query, containerName);
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
// Verifies the client builder carries the spring-data-cosmos user-agent suffix
// and project version; uses reflection because getUserAgentSuffix is not public.
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
// Best-effort cleanup: deletes the database, tolerating 404 (already absent).
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
// Any status other than 404 is a real failure and fails the test.
assertEquals(ex.getStatusCode(), 404);
}
}
} |
corrected | public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), patchedPerson.getAge());
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
} | assertEquals(patchedPerson.getAge(), patchedPerson.getAge()); | public void testPatchPreConditionFail() {
try {
options.setFilterPredicate("FROM person p WHERE p.lastName = 'dummy'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(PreconditionFailedException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
} | class CosmosTemplateIT {
// Fixture entities: three people sharing hobbies/addresses/age but with distinct ids/names.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Fragment of the service's precondition-failure message, used for assertion matching.
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch: replace /age with PATCH_AGE_1.
private static final CosmosPatchOperations operations = CosmosPatchOperations
.create()
.replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment in one batch.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
.create()
.set("/firstName", PATCH_FIRST_NAME)
.replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
.add("/hobbies/2", PATCH_HOBBY1)
.remove("/shippingAddresses/1")
.increment("/age", PATCH_AGE_INCREMENT);
// Shared mutable options instance — tests set a filter predicate on it; NOTE(review):
// shared across tests, so predicate state may leak between tests if not reset.
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The TEST_PERSON instance as returned by the insert in setUp (carries a real etag).
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
// Declared throws accommodates fixture initialization; body intentionally empty.
public CosmosTemplateIT() throws JsonProcessingException {
}
@Before
// Lazily builds the shared client/template once, then resets containers and
// re-inserts TEST_PERSON before every test.
public void setUp() throws ClassNotFoundException {
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
// Builds a CosmosTemplate for the given config/database, scanning the application
// context for @Persistent entities.
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
final CosmosFactory cosmosFactory = new CosmosFactory(client, dbName);
final CosmosMappingContext mappingContext = new CosmosMappingContext();
mappingContext.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
final MappingCosmosConverter cosmosConverter = new MappingCosmosConverter(mappingContext, null);
return new CosmosTemplate(cosmosFactory, config, cosmosConverter);
}
// Convenience insert using the person's own partition-key value.
private void insertPerson(Person person) {
cosmosTemplate.insert(person,
new PartitionKey(personInfo.getPartitionKeyFieldValue(person)));
}
@Test
// Re-inserting the already-inserted TEST_PERSON must surface a ConflictException
// wrapped in CosmosAccessException, with diagnostics populated.
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
@Test(expected = CosmosAccessException.class)
// A null id without auto-generation must be rejected.
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
@Test
// A null id on an auto-generate-annotated entity gets an id assigned on insert.
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
@Test
// findAll returns the single seeded person and records query statistics.
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
// findById returns the entity for a known id and null for an unknown id.
public void testFindById() {
final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertEquals(result, TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
// Missing id resolves to null rather than throwing.
final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
NOT_EXIST_ID, Person.class);
assertThat(nullResult).isNull();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
@Test
// findByIds returns all three seeded people; order is not asserted, only membership.
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
assertThat(result).containsAll(expected);
}
@Test
// Upserting an id that no longer exists behaves as an insert.
public void testUpsertNewDocument() {
// Remove the seeded entity first so the upsert takes the create path.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Randomized name guards against collisions with fixture data.
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
@Test
// Upsert with the correct etag replaces the document, and the returned entity's
// etag matches what a subsequent read observes.
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
@Test
// Upsert with the correct etag succeeds and returns the updated entity.
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
@Test
// A single replace patch updates /age to PATCH_AGE_1.
public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
// A batched patch (set/replace/add/remove/increment) applies all five operations atomically.
public void testPatchMultiOperations() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// remove("/shippingAddresses/1") leaves a single address.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
@Test
// When the filter predicate matches the document, the patch is applied.
public void testPatchPreConditionSuccess() {
options.setFilterPredicate("FROM person p WHERE p.lastName = '"+LAST_NAME+"'");
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, options);
// Fixed: was assertEquals(patchedPerson.getAge(), patchedPerson.getAge()) — a
// tautology that passes even if the patch is not applied. Assert the value the
// "operations" patch replaces /age with.
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
@Test
// Fixed: the annotation was duplicated (@Test @Test) — @Test is not a repeatable
// annotation, so the duplicate is a compile error.
// Upserting with a stale/wrong etag must fail the precondition and leave the
// stored document unmodified.
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(WRONG_ETAG);
try {
cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
} catch (CosmosAccessException e) {
assertThat(e.getCosmosException()).isNotNull();
final Throwable cosmosClientException = e.getCosmosException();
assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
// The failed upsert must not have changed the stored entity.
final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
TEST_PERSON.getId(), Person.class);
assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
return;
}
fail();
}
@Test
// deleteById removes only the targeted entity; the other remains queryable.
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Delete is a point operation: diagnostics yes, query statistics no.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
@Test
// deleteEntity removes the entity instance returned by insert (etag-bearing).
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
/**
 * Verifies count(containerName) tracks inserts (1 seeded document, then 2) and
 * that each count query records diagnostics plus response statistics with a
 * positive request charge. Assertion order is significant: the diagnostics
 * utils reflect the most recent template operation.
 */
@Test
public void testCountByContainer() {
    final long prevCount = cosmosTemplate.count(containerName);
    assertThat(prevCount).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    final long newCount = cosmosTemplate.count(containerName);
    assertThat(newCount).isEqualTo(2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Counts documents matching a firstName criterion, both case-sensitive (NEVER)
 * and case-insensitive against an upper-cased value (ALWAYS); both should find
 * exactly the newly inserted TEST_PERSON_2.
 */
@Test
public void testCountByQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // Insert is a point operation: diagnostics only, no response statistics yet.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    final long count = cosmosTemplate.count(query, containerName);
    assertThat(count).isEqualTo(1);
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
    final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
    assertThat(countIgnoreCase).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * With two documents and a page size of PAGE_SIZE_1, findAll must return one
 * full non-last page followed, via nextPageable(), by a last page with the
 * single remaining document. Each page query records diagnostics and statistics.
 */
@Test
public void testFindAllPageableMultiPages() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
    final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
    assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
    PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    // Continuation token from page1 drives the second (final) page.
    final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
        containerName);
    assertThat(page2.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Three documents with page size PAGE_SIZE_2: the first page must hold
 * TEST_PERSON and TEST_PERSON_2, the continuation page must hold TEST_PERSON_3
 * and be flagged as the last page.
 */
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
    final List<Person> resultPage1 = TestUtils.toList(page1);
    final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
    assertThat(resultPage1.size()).isEqualTo(expected.size());
    assertThat(resultPage1).containsAll(expected);
    PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
    final List<Person> resultPage2 = TestUtils.toList(page2);
    final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
    assertThat(resultPage2.size()).isEqualTo(expected2.size());
    assertThat(resultPage2).containsAll(expected2);
    PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * paginationQuery with a firstName criterion must return a single-element last
 * page, both case-sensitively and (with IgnoreCaseType.ALWAYS) against the
 * upper-cased value.
 */
@Test
public void testPaginationQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
    final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
    assertThat(page.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
    final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
    final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
        containerName);
    assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
    PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Sorting by firstName ascending must order barney &lt; fred &lt; george;
 * applying withLimit(1) to the same query must then return only "barney".
 */
@Test
public void testFindWithSortAndLimit() {
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byNewLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byNewLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Re-run the same sorted query with a result cap of one.
    query.withLimit(1);
    final List<Person> limited = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(limited.size()).isEqualTo(1);
    assertThat(limited.get(0).getFirstName()).isEqualTo("barney");
}
/**
 * With the ascending-by-firstName ordering (barney, fred, george), applying
 * withOffsetAndLimit(1, 1) must skip "barney" and return exactly "fred".
 */
@Test
public void testFindWithOffsetAndLimit() {
    final Person fred = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person barney = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person george = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    insertPerson(fred);
    insertPerson(barney);
    insertPerson(george);
    final Criteria byNewLastName = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
        Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(byNewLastName);
    query.with(Sort.by(Sort.Direction.ASC, "firstName"));
    final List<Person> sorted = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(sorted.size()).isEqualTo(3);
    assertThat(sorted.get(0).getFirstName()).isEqualTo("barney");
    assertThat(sorted.get(1).getFirstName()).isEqualTo("fred");
    assertThat(sorted.get(2).getFirstName()).isEqualTo("george");
    // Skip one row, take one row: second element of the sorted ordering.
    query.withOffsetAndLimit(1, 1);
    final List<Person> window = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(window.size()).isEqualTo(1);
    assertThat(window.get(0).getFirstName()).isEqualTo("fred");
}
/**
 * findAll with a page request carrying a DESC sort on firstName must return all
 * three documents in one last page, ordered NEW_FIRST_NAME, NEW_FIRST_NAME,
 * FIRST_NAME.
 */
@Test
public void testFindAllWithPageableAndSort() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
    final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
    assertThat(page.getContent().size()).isEqualTo(3);
    PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
    final List<Person> result = page.getContent();
    assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
    assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
    assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Five documents, ASC sort on firstName, page size PAGE_SIZE_3: the ordering
 * must hold ACROSS the page boundary — first page [barney, FIRST_NAME, fred],
 * second (last) page [NEW_FIRST_NAME, NEW_FIRST_NAME].
 */
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
    final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    cosmosTemplate.insert(testPerson4,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    cosmosTemplate.insert(testPerson5,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
    final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
    final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
        containerName);
    assertThat(firstPage.getContent().size()).isEqualTo(3);
    PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
    final List<Person> firstPageResults = firstPage.getContent();
    assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
    assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
    assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
    final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
        containerName);
    assertThat(secondPage.getContent().size()).isEqualTo(2);
    PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
    final List<Person> secondPageResults = secondPage.getContent();
    assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
    assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
/**
 * exists() must be true both for an exact (case-sensitive) firstName match and
 * for the upper-cased value when IgnoreCaseType.ALWAYS is used; the exists query
 * must record diagnostics and a positive request charge.
 */
@Test
public void testExists() {
    final CosmosQuery caseSensitiveQuery = new CosmosQuery(Criteria.getInstance(
        CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER));
    assertThat(cosmosTemplate.exists(caseSensitiveQuery, Person.class, containerName)).isTrue();
    final CosmosQuery ignoreCaseQuery = new CosmosQuery(Criteria.getInstance(
        CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS));
    assertThat(cosmosTemplate.exists(ignoreCaseQuery, Person.class, containerName)).isTrue();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// ARRAY_CONTAINS on the "hobbies" array must match only the seeded TEST_PERSON.
@Test
public void testArrayContainsCriteria() {
    final Criteria hobbyCriteria = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
        Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
    final CosmosQuery hobbyQuery = new CosmosQuery(hobbyCriteria);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(hobbyQuery, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * CONTAINING on firstName: case-sensitive "first" matches the three seeded
 * persons; with IgnoreCaseType.ALWAYS it also matches the "NEW_FIRST_NAME"
 * document.
 */
@Test
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Renamed from TEST_PERSON_4: locals use lowerCamelCase; UPPER_SNAKE_CASE is
    // reserved for the static constants declared on the class.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
/**
 * CONTAINING on the id field: each of the substrings "1", "2", "3" must select
 * exactly the document whose id ends with that digit.
 */
@Test
public void testContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final Criteria idContainsOne = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesOne =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContainsOne), Person.class, containerName));
    assertThat(matchesOne).containsExactly(TEST_PERSON);
    final Criteria idContainsTwo = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesTwo =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContainsTwo), Person.class, containerName));
    assertThat(matchesTwo).containsExactly(TEST_PERSON_2);
    final Criteria idContainsThree = Criteria.getInstance(CriteriaType.CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> matchesThree =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idContainsThree), Person.class, containerName));
    assertThat(matchesThree).containsExactly(TEST_PERSON_3);
}
/**
 * NOT_CONTAINING on firstName: case-sensitive "li" excludes only TEST_PERSON;
 * case-insensitive "new" excludes everything except TEST_PERSON.
 */
@Test
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    // Renamed from TEST_PERSON_4: locals use lowerCamelCase, matching the
    // convention used by the other tests in this class.
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
/**
 * NOT_CONTAINING on the id field: excluding substring "1"/"2"/"3" must drop
 * exactly the document whose id contains that digit.
 */
@Test
public void testNotContainsCriteria2() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    final Criteria idWithoutOne = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutOne =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithoutOne), Person.class, containerName));
    assertThat(withoutOne).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
    final Criteria idWithoutTwo = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutTwo =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithoutTwo), Person.class, containerName));
    assertThat(withoutTwo).containsExactly(TEST_PERSON, TEST_PERSON_3);
    final Criteria idWithoutThree = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
        Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
    final List<Person> withoutThree =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(idWithoutThree), Person.class, containerName));
    assertThat(withoutThree).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
// IS_NOT_NULL takes no comparison values; the seeded TEST_PERSON has a lastName,
// so it must be the only match.
@Test
public void testIsNotNullCriteriaCaseSensitive() {
    final Criteria lastNamePresent = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
        Collections.emptyList(), Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(lastNamePresent);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// STARTS_WITH with IgnoreCaseType.ALWAYS must match the upper-cased full
// firstName as a (trivially complete) prefix.
@Test
public void testStartsWithCriteriaCaseSensitive() {
    final Criteria prefixCriteria = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    final CosmosQuery query = new CosmosQuery(prefixCriteria);
    final List<Person> matches = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// IS_EQUAL with IgnoreCaseType.ALWAYS must match regardless of value casing.
@Test
public void testIsEqualCriteriaCaseSensitive() {
    // Renamed from "nameStartsWith" (copy-paste residue from the STARTS_WITH
    // test above): this criterion asserts equality, not a prefix match.
    Criteria nameEquals = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameEquals), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
// STRING_EQUALS with IgnoreCaseType.ALWAYS must match regardless of value casing.
@Test
public void testStringEqualsCriteriaCaseSensitive() {
    // Renamed from "nameStartsWith" (copy-paste residue from the STARTS_WITH
    // test): this criterion is a string-equality check, not a prefix match.
    Criteria stringEquals = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
        Part.IgnoreCaseType.ALWAYS);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(stringEquals), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON);
}
// BETWEEN on age with the inclusive window [AGE - 1, AGE + 1] must match the
// seeded TEST_PERSON.
@Test
public void testBetweenCriteria() {
    final List<Object> ageWindow = Arrays.asList(AGE - 1, AGE + 1);
    final Criteria ageInWindow =
        Criteria.getInstance(CriteriaType.BETWEEN, "age", ageWindow, Part.IgnoreCaseType.NEVER);
    final List<Person> matches =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageInWindow), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// An IS_EQUAL subject may address a nested property via index/key syntax;
// matching on the first shipping address's postal code must find TEST_PERSON.
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
    final String expectedPostalCode = ADDRESSES.get(0).getPostalCode();
    final String nestedSubject = "shippingAddresses[0]['postalCode']";
    final Criteria byPostalCode = Criteria.getInstance(CriteriaType.IS_EQUAL, nestedSubject,
        Collections.singletonList(expectedPostalCode), Part.IgnoreCaseType.NEVER);
    final List<Person> matches =
        TestUtils.toList(cosmosTemplate.find(new CosmosQuery(byPostalCode), Person.class, containerName));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// A criterion subject containing spaces inside a quoted map key must survive
// SQL generation and still match via runQuery.
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
    final String expectedPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
    final String quotedSubject = "passportIdsByCountry['United States of America']";
    final Criteria byPassportId = Criteria.getInstance(CriteriaType.IS_EQUAL, quotedSubject,
        Collections.singletonList(expectedPassportId), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec spec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(byPassportId));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(spec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
// runQuery with a generated BETWEEN spec must deserialize back into Person and
// return the seeded TEST_PERSON.
@Test
public void testRunQueryWithSimpleReturnType() {
    final Criteria ageInWindow = Criteria.getInstance(CriteriaType.BETWEEN, "age",
        Arrays.asList(AGE - 1, AGE + 1), Part.IgnoreCaseType.NEVER);
    final SqlQuerySpec spec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageInWindow));
    final List<Person> matches = TestUtils.toList(cosmosTemplate.runQuery(spec, Person.class, Person.class));
    assertThat(matches).containsExactly(TEST_PERSON);
}
/**
 * sliceQuery with a firstName criterion and page size PAGE_SIZE_2 must return a
 * single-element slice, with diagnostics and a positive request charge recorded.
 */
@Test
public void testSliceQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    // Insert is a point operation: diagnostics only, no response statistics.
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
    final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
    assertThat(slice.getContent().size()).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * runSliceQuery with a pre-generated SqlQuerySpec must behave like sliceQuery:
 * one matching element, diagnostics and positive request charge recorded.
 */
@Test
public void testRunSliceQuery() {
    cosmosTemplate.insert(TEST_PERSON_2,
        new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
    final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
    final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
    final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
    assertThat(slice.getContent().size()).isEqualTo(1);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
/**
 * Creating a container for an autoscale-annotated entity must provision it with
 * the configured autoscale max throughput, verified by reading the throughput
 * back through the raw async client.
 */
@Test
public void createWithAutoscale() throws ClassNotFoundException {
    final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
        new CosmosEntityInformation<>(AutoScaleSample.class);
    CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
    assertNotNull(containerProperties);
    // block() may yield null; guarded by the assertNotNull below.
    ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
        .getContainer(autoScaleSampleInfo.getContainerName())
        .readThroughput()
        .block();
    assertNotNull(throughput);
    assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
        throughput.getProperties().getAutoscaleMaxThroughput());
}
/**
 * A template configured with manual database throughput must create the database
 * with exactly that RU setting, verified by reading throughput back through the
 * raw client. The database is dropped first so creation is actually exercised.
 */
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
    final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
    deleteDatabaseIfExists(configuredThroughputDbName);
    Integer expectedRequestUnits = 700;
    final CosmosConfig config = CosmosConfig.builder()
        .enableDatabaseThroughput(false, expectedRequestUnits)
        .build();
    final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
    final CosmosEntityInformation<Person, String> personInfo =
        new CosmosEntityInformation<>(Person.class);
    configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
    final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
    final ThroughputResponse response = database.readThroughput().block();
    // block() may yield null; fail with a clear assertion instead of an NPE
    // (matches the guard used by createWithAutoscale).
    assertNotNull(response);
    assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
/**
 * A template built with maxDegreeOfParallelism(20) must carry that value in its
 * private "maxDegreeOfParallelism" field; a count query is run first to exercise
 * the configured query path.
 */
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxDegreeOfParallelism(20)
        .build();
    final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call only exercises the query path.
    maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
    // (expected, actual) — the original arguments were reversed.
    assertEquals(20, (int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"));
}
/**
 * A template built with maxBufferedItemCount(500) must carry that value in its
 * private "maxBufferedItemCount" field.
 */
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .maxBufferedItemCount(500)
        .build();
    final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call only exercises the query path.
    maxBufferedItemCountCosmosTemplate.count(query, containerName);
    // (expected, actual) — the original arguments were reversed.
    assertEquals(500, (int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"));
}
/**
 * A template built with responseContinuationTokenLimitInKb(2000) must carry that
 * value in its private "responseContinuationTokenLimitInKb" field.
 */
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .responseContinuationTokenLimitInKb(2000)
        .build();
    final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
        createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call only exercises the query path.
    responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
    // (expected, actual) — the original arguments were reversed.
    assertEquals(2000, (int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
        "responseContinuationTokenLimitInKb"));
}
/**
 * A template built with enableQueryMetrics(true) must carry that flag in its
 * private "queryMetricsEnabled" field.
 * NOTE(review): method name contains a typo ("Mertics" -> "Metrics"); kept as-is
 * so existing test reports and name-based filters keep working.
 */
@Test
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
    final CosmosConfig config = CosmosConfig.builder()
        .enableQueryMetrics(true)
        .build();
    final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
    final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
        Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
    final CosmosQuery query = new CosmosQuery(criteria);
    // Result intentionally ignored; the call only exercises the query path.
    queryMetricsEnabledCosmosTemplate.count(query, containerName);
    // (expected, actual) — the original arguments were reversed.
    assertEquals(true, (boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"));
}
/**
 * Reads the private userAgentSuffix off the injected CosmosClientBuilder via
 * reflection and verifies the spring-data-cosmos suffix and the project version
 * are both appended to the user agent.
 */
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    // getUserAgentSuffix is not public API, hence the reflective access.
    Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
    getUserAgentSuffix.setAccessible(true);
    String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
    assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
    assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
/**
 * Best-effort cleanup: drops the database if present. A 404 (database not found)
 * is the only tolerated failure; any other CosmosException status fails the test.
 */
private void deleteDatabaseIfExists(String dbName) {
    CosmosAsyncDatabase database = client.getDatabase(dbName);
    try {
        database.delete().block();
    } catch (CosmosException ex) {
        // (expected, actual) — the original arguments were reversed, which
        // produces misleading failure messages on unexpected status codes.
        assertEquals(404, ex.getStatusCode());
    }
}
} | class CosmosTemplateIT {
// --- Seed fixtures: three Person documents; TEST_PERSON is (re)inserted before
// every test by setUp(), the others are inserted by individual tests as needed.
private static final Person TEST_PERSON = new Person(ID_1, FIRST_NAME, LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_2 = new Person(ID_2, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
private static final Person TEST_PERSON_3 = new Person(ID_3, NEW_FIRST_NAME, NEW_LAST_NAME, HOBBIES,
    ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
// Substring expected in precondition-failure messages (optimistic concurrency tests).
private static final String PRECONDITION_IS_NOT_MET = "is not met";
private static final String WRONG_ETAG = "WRONG_ETAG";
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final JsonNode NEW_PASSPORT_IDS_BY_COUNTRY_JSON = OBJECT_MAPPER.convertValue(NEW_PASSPORT_IDS_BY_COUNTRY, JsonNode.class);
// Single-operation patch used by the basic patch tests.
// NOTE(review): "operations"/"options" are static final yet lowerCamelCase; the
// class convention elsewhere is UPPER_SNAKE_CASE for constants.
private static final CosmosPatchOperations operations = CosmosPatchOperations
    .create()
    .replace("/age", PATCH_AGE_1);
// Multi-operation patch exercising set/replace/add/remove/increment together.
// NOTE(review): non-static and non-final, unlike the sibling patch constant —
// presumably unintentional; confirm before tightening.
CosmosPatchOperations multiPatchOperations = CosmosPatchOperations
    .create()
    .set("/firstName", PATCH_FIRST_NAME)
    .replace("/passportIdsByCountry", NEW_PASSPORT_IDS_BY_COUNTRY_JSON)
    .add("/hobbies/2", PATCH_HOBBY1)
    .remove("/shippingAddresses/1")
    .increment("/age", PATCH_AGE_INCREMENT);
private static final CosmosPatchItemRequestOptions options = new CosmosPatchItemRequestOptions();
// Creates/drops the shared test containers once per class.
@ClassRule
public static final IntegrationTestCollectionManager collectionManager = new IntegrationTestCollectionManager();
// Shared across tests; lazily initialized by the first setUp() invocation.
private static CosmosAsyncClient client;
private static CosmosTemplate cosmosTemplate;
private static CosmosEntityInformation<Person, String> personInfo;
private static String containerName;
// The TEST_PERSON instance returned by the per-test seeding insert.
private Person insertedPerson;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CosmosClientBuilder cosmosClientBuilder;
@Autowired
private CosmosConfig cosmosConfig;
// Captures diagnostics/response statistics of the most recent template operation.
@Autowired
private ResponseDiagnosticsTestUtils responseDiagnosticsTestUtils;
public CosmosTemplateIT() throws JsonProcessingException {
}
// Creates the shared client/template on first run only, then ensures the test containers
// exist and are empty, and seeds TEST_PERSON (keeping the returned entity for etag checks).
@Before
public void setUp() throws ClassNotFoundException {
// Lazy one-time initialization: cosmosTemplate is static and reused across tests.
if (cosmosTemplate == null) {
client = CosmosFactory.createCosmosAsyncClient(cosmosClientBuilder);
personInfo = new CosmosEntityInformation<>(Person.class);
containerName = personInfo.getContainerName();
cosmosTemplate = createCosmosTemplate(cosmosConfig, TestConstants.DB_NAME);
}
collectionManager.ensureContainersCreatedAndEmpty(cosmosTemplate, Person.class,
GenIdEntity.class, AuditableEntity.class);
// The returned entity carries the server-assigned _etag used by optimistic-lock tests.
insertedPerson = cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(TEST_PERSON.getLastName()));
}
/**
 * Builds a {@link CosmosTemplate} bound to the given database, scanning the test
 * application context for persistent entity types.
 */
private CosmosTemplate createCosmosTemplate(CosmosConfig config, String dbName) throws ClassNotFoundException {
    final CosmosMappingContext context = new CosmosMappingContext();
    context.setInitialEntitySet(new EntityScanner(this.applicationContext).scan(Persistent.class));
    final MappingCosmosConverter converter = new MappingCosmosConverter(context, null);
    return new CosmosTemplate(new CosmosFactory(client, dbName), config, converter);
}
// Inserts the given person, deriving the partition key from the entity metadata.
private void insertPerson(Person person) {
    final PartitionKey partitionKey = new PartitionKey(personInfo.getPartitionKeyFieldValue(person));
    cosmosTemplate.insert(person, partitionKey);
}
// Inserting a second document with the id already seeded in setUp() must surface the
// server conflict as a CosmosAccessException wrapping a ConflictException.
@Test
public void testInsertDuplicateIdShouldFailWithConflictException() {
try {
cosmosTemplate.insert(Person.class.getSimpleName(), TEST_PERSON,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
fail();
} catch (CosmosAccessException ex) {
assertThat(ex.getCosmosException()).isInstanceOf(ConflictException.class);
// Diagnostics must be captured even for failed operations.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
}
// A null id on an entity whose id field is not auto-generated must be rejected.
@Test(expected = CosmosAccessException.class)
public void testInsertShouldFailIfColumnNotAnnotatedWithAutoGenerate() {
final Person person = new Person(null, FIRST_NAME, LAST_NAME, HOBBIES, ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(Person.class.getSimpleName(), person, new PartitionKey(person.getLastName()));
}
// An entity type with an auto-generated id gets one assigned on insert when id is null.
@Test
public void testInsertShouldGenerateIdIfColumnAnnotatedWithAutoGenerate() {
final GenIdEntity entity = new GenIdEntity(null, "foo");
final GenIdEntity insertedEntity = cosmosTemplate.insert(GenIdEntity.class.getSimpleName(),
entity, null);
assertThat(insertedEntity.getId()).isNotNull();
}
// findAll over the seeded container returns exactly the one entity inserted in setUp()
// and populates query diagnostics, including a positive request charge.
@Test
public void testFindAll() {
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class.getSimpleName(),
Person.class));
assertThat(result.size()).isEqualTo(1);
assertThat(result.get(0)).isEqualTo(TEST_PERSON);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// Point-read by id returns the seeded entity (with populated diagnostics); a missing id
// returns null rather than throwing.
@Test
public void testFindById() {
    final Person result = cosmosTemplate.findById(Person.class.getSimpleName(),
        TEST_PERSON.getId(), Person.class);
    // JUnit convention is assertEquals(expected, actual); arguments were swapped before,
    // which produced misleading failure messages.
    assertEquals(TEST_PERSON, result);
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
    final Person nullResult = cosmosTemplate.findById(Person.class.getSimpleName(),
        NOT_EXIST_ID, Person.class);
    assertThat(nullResult).isNull();
    assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
}
// findByIds with three ids returns all three seeded entities, order-independent.
@Test
public void testFindByMultiIds() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
final List<Object> ids = Lists.newArrayList(ID_1, ID_2, ID_3);
final List<Person> result = TestUtils.toList(cosmosTemplate.findByIds(ids, Person.class, containerName));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
assertThat(result.size()).isEqualTo(expected.size());
// containsAll + size check: membership without asserting ordering.
assertThat(result).containsAll(expected);
}
// Upsert of a non-existing id behaves as an insert; point operations populate
// CosmosDiagnostics but not the query-side response statistics.
@Test
public void testUpsertNewDocument() {
// Remove the seeded document first so the upsert takes the create path.
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
final String firstName = NEW_FIRST_NAME
+ "_"
+ UUID.randomUUID();
final Person newPerson = new Person(TEST_PERSON.getId(), firstName, NEW_FIRST_NAME, null, null,
AGE, PASSPORT_IDS_BY_COUNTRY);
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), newPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person.getFirstName(), firstName);
}
// upsertAndReturnEntity returns the updated entity whose fresh _etag matches a
// subsequent point read of the same document.
@Test
public void testUpdateWithReturnEntity() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
// Carry the current etag so the replace passes the precondition check.
updated.set_etag(insertedPerson.get_etag());
final Person updatedPerson = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
final Person findPersonById = cosmosTemplate.findById(Person.class.getSimpleName(),
updatedPerson.getId(), Person.class);
assertEquals(updatedPerson, updated);
assertThat(updatedPerson.get_etag()).isEqualTo(findPersonById.get_etag());
}
// Upsert with the current etag replaces the seeded document and returns the new state.
@Test
public void testUpdate() {
final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
AGE, PASSPORT_IDS_BY_COUNTRY);
updated.set_etag(insertedPerson.get_etag());
final Person person = cosmosTemplate.upsertAndReturnEntity(Person.class.getSimpleName(), updated);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
assertEquals(person, updated);
}
// Single replace patch on /age applies and is reflected in the returned entity.
@Test
public void testPatch() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations);
assertEquals(patchedPerson.getAge(), PATCH_AGE_1);
}
// All five operations of the multi-patch (set, replace, add, remove, increment) apply
// atomically and are visible in the returned entity.
@Test
public void testPatchMultiOperations() {
Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, multiPatchOperations);
assertEquals(patchedPerson.getAge().intValue(), (AGE + PATCH_AGE_INCREMENT));
assertEquals(patchedPerson.getHobbies(), PATCH_HOBBIES);
assertEquals(patchedPerson.getFirstName(), PATCH_FIRST_NAME);
// One of the two seeded shipping addresses was removed by the patch.
assertEquals(patchedPerson.getShippingAddresses().size(), 1);
assertEquals(patchedPerson.getPassportIdsByCountry(), NEW_PASSPORT_IDS_BY_COUNTRY);
}
// Conditional patch: the filter predicate matches the stored document, so the age patch
// is applied.
@Test
public void testPatchPreConditionSuccess() {
    // Use a local options instance instead of mutating the shared static 'options' field;
    // the previous code leaked the filter predicate into any other test using that field.
    final CosmosPatchItemRequestOptions patchOptions = new CosmosPatchItemRequestOptions();
    patchOptions.setFilterPredicate("FROM person p WHERE p.lastName = '" + LAST_NAME + "'");
    Person patchedPerson = cosmosTemplate.patch(insertedPerson.getId(), new PartitionKey(insertedPerson.getLastName()), Person.class, operations, patchOptions);
    // assertEquals(expected, actual) per JUnit convention.
    assertEquals(PATCH_AGE_1, patchedPerson.getAge());
}
// Optimistic concurrency: an upsert carrying a wrong _etag must fail with a
// precondition-not-met CosmosException and leave the stored entity unchanged.
// Fix: the method carried a duplicate @Test annotation, which does not compile
// (org.junit.Test is not a repeatable annotation).
@Test
public void testOptimisticLockWhenUpdatingWithWrongEtag() {
    final Person updated = new Person(TEST_PERSON.getId(), UPDATED_FIRST_NAME,
        TEST_PERSON.getLastName(), TEST_PERSON.getHobbies(), TEST_PERSON.getShippingAddresses(),
        AGE, PASSPORT_IDS_BY_COUNTRY);
    updated.set_etag(WRONG_ETAG);
    try {
        cosmosTemplate.upsert(Person.class.getSimpleName(), updated);
    } catch (CosmosAccessException e) {
        assertThat(e.getCosmosException()).isNotNull();
        final Throwable cosmosClientException = e.getCosmosException();
        assertThat(cosmosClientException).isInstanceOf(CosmosException.class);
        assertThat(cosmosClientException.getMessage()).contains(PRECONDITION_IS_NOT_MET);
        assertThat(responseDiagnosticsTestUtils.getDiagnostics()).isNotNull();
        // The rejected upsert must not have modified the stored document.
        final Person unmodifiedPerson = cosmosTemplate.findById(Person.class.getSimpleName(),
            TEST_PERSON.getId(), Person.class);
        assertThat(unmodifiedPerson.getFirstName()).isEqualTo(insertedPerson.getFirstName());
        return;
    }
    fail();
}
// deleteById removes exactly the targeted document; the other entity survives.
@Test
public void testDeleteById() {
cosmosTemplate.insert(TEST_PERSON_2, null);
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteById(Person.class.getSimpleName(), TEST_PERSON.getId(),
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON)));
// Point delete populates diagnostics but not query response statistics.
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON_2);
}
// deleteEntity removes the document identified by the entity instance (id + etag).
@Test
public void testDeleteByEntity() {
Person insertedPerson = cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(TEST_PERSON_2.getLastName()));
assertThat(cosmosTemplate.count(Person.class.getSimpleName())).isEqualTo(2);
cosmosTemplate.deleteEntity(Person.class.getSimpleName(), insertedPerson);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final List<Person> result = TestUtils.toList(cosmosTemplate.findAll(Person.class));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
assertThat(result.size()).isEqualTo(1);
assertEquals(result.get(0), TEST_PERSON);
}
// count(container) reflects inserts immediately: 1 after setUp(), 2 after another insert.
@Test
public void testCountByContainer() {
final long prevCount = cosmosTemplate.count(containerName);
assertThat(prevCount).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
final long newCount = cosmosTemplate.count(containerName);
assertThat(newCount).isEqualTo(2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// count(query) matches by firstName both case-sensitively and (with ALWAYS ignore-case)
// case-insensitively against an upper-cased probe value.
@Test
public void testCountByQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = cosmosTemplate.count(query, containerName);
assertThat(count).isEqualTo(1);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON_2.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final long countIgnoreCase = cosmosTemplate.count(queryIgnoreCase, containerName);
assertThat(countIgnoreCase).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// With 2 documents and page size 1: page 1 is full and non-last, page 2 (via the
// continuation in nextPageable) holds the remainder and is last.
@Test
public void testFindAllPageableMultiPages() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_1, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page1.getContent().size()).isEqualTo(PAGE_SIZE_1);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class,
containerName);
assertThat(page2.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// With 3 documents and page size 2: first page holds two entities, second page holds
// the remaining one and is the last page.
@Test
public void testFindAllPageableMultiPagesPageSizeTwo() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final CosmosPageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final Page<Person> page1 = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
final List<Person> resultPage1 = TestUtils.toList(page1);
final List<Person> expected = Lists.newArrayList(TEST_PERSON, TEST_PERSON_2);
assertThat(resultPage1.size()).isEqualTo(expected.size());
assertThat(resultPage1).containsAll(expected);
PageTestUtils.validateNonLastPage(page1, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
final Page<Person> page2 = cosmosTemplate.findAll(page1.nextPageable(), Person.class, containerName);
final List<Person> resultPage2 = TestUtils.toList(page2);
final List<Person> expected2 = Lists.newArrayList(TEST_PERSON_3);
assertThat(resultPage2.size()).isEqualTo(expected2.size());
assertThat(resultPage2).containsAll(expected2);
PageTestUtils.validateLastPage(page2, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// paginationQuery filters by firstName, both case-sensitively and case-insensitively;
// each result fits in one (last) page of size PAGE_SIZE_2.
@Test
public void testPaginationQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Page<Person> page = cosmosTemplate.paginationQuery(query, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(page, PAGE_SIZE_2);
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME.toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase).with(pageRequest);
final Page<Person> pageIgnoreCase = cosmosTemplate.paginationQuery(queryIgnoreCase, Person.class,
containerName);
assertThat(pageIgnoreCase.getContent().size()).isEqualTo(1);
PageTestUtils.validateLastPage(pageIgnoreCase, PAGE_SIZE_2);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// find(query) honors ASC sort on firstName, and withLimit(1) returns only the first
// sorted result.
@Test
public void testFindWithSortAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
// Reuse the same (mutated) query object with a limit applied.
query.withLimit(1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("barney");
}
// withOffsetAndLimit(1, 1) skips the first sorted result and returns exactly the second.
@Test
public void testFindWithOffsetAndLimit() {
final Person testPerson4 = new Person("id_4", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson6 = new Person("id_6", "george", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
insertPerson(testPerson4);
insertPerson(testPerson5);
insertPerson(testPerson6);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "lastName",
Collections.singletonList(NEW_LAST_NAME), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery query = new CosmosQuery(criteria);
query.with(Sort.by(Sort.Direction.ASC, "firstName"));
final List<Person> result = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(result.size()).isEqualTo(3);
assertThat(result.get(0).getFirstName()).isEqualTo("barney");
assertThat(result.get(1).getFirstName()).isEqualTo("fred");
assertThat(result.get(2).getFirstName()).isEqualTo("george");
query.withOffsetAndLimit(1, 1);
final List<Person> resultWithLimit = TestUtils.toList(cosmosTemplate.find(query, Person.class, containerName));
assertThat(resultWithLimit.size()).isEqualTo(1);
assertThat(resultWithLimit.get(0).getFirstName()).isEqualTo("fred");
}
// findAll with a sorted page request returns all three entities DESC by firstName.
@Test
public void testFindAllWithPageableAndSort() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Sort sort = Sort.by(Sort.Direction.DESC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> page = cosmosTemplate.findAll(pageRequest, Person.class, containerName);
assertThat(page.getContent().size()).isEqualTo(3);
PageTestUtils.validateLastPage(page, PAGE_SIZE_3);
// DESC: the two NEW_FIRST_NAME entities come before the seeded FIRST_NAME one.
final List<Person> result = page.getContent();
assertThat(result.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(result.get(2).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// With 5 documents, page size 3 and ASC sort, the global sort order must hold across the
// page boundary (page 1: barney, First_Name, fred; page 2: the two NEW_FIRST_NAMEs).
@Test
public void testFindAllWithTwoPagesAndVerifySortOrder() {
final Person testPerson4 = new Person("id_4", "barney", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
final Person testPerson5 = new Person("id_5", "fred", NEW_LAST_NAME, HOBBIES,
ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
cosmosTemplate.insert(testPerson4,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
cosmosTemplate.insert(testPerson5,
new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson5)));
final Sort sort = Sort.by(Sort.Direction.ASC, "firstName");
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_3, null, sort);
final Page<Person> firstPage = cosmosTemplate.findAll(pageRequest, Person.class,
containerName);
assertThat(firstPage.getContent().size()).isEqualTo(3);
PageTestUtils.validateNonLastPage(firstPage, firstPage.getContent().size());
final List<Person> firstPageResults = firstPage.getContent();
assertThat(firstPageResults.get(0).getFirstName()).isEqualTo(testPerson4.getFirstName());
assertThat(firstPageResults.get(1).getFirstName()).isEqualTo(FIRST_NAME);
assertThat(firstPageResults.get(2).getFirstName()).isEqualTo(testPerson5.getFirstName());
final Page<Person> secondPage = cosmosTemplate.findAll(firstPage.nextPageable(), Person.class,
containerName);
assertThat(secondPage.getContent().size()).isEqualTo(2);
PageTestUtils.validateLastPage(secondPage, PAGE_SIZE_3);
final List<Person> secondPageResults = secondPage.getContent();
assertThat(secondPageResults.get(0).getFirstName()).isEqualTo(NEW_FIRST_NAME);
assertThat(secondPageResults.get(1).getFirstName()).isEqualTo(NEW_FIRST_NAME);
}
// exists(query) is true for the seeded firstName, both exact-case and ignore-case
// (probing with the upper-cased value).
@Test
public void testExists() {
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final Boolean exists = cosmosTemplate.exists(query, Person.class, containerName);
assertThat(exists).isTrue();
final Criteria criteriaIgnoreCase = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()), Part.IgnoreCaseType.ALWAYS);
final CosmosQuery queryIgnoreCase = new CosmosQuery(criteriaIgnoreCase);
final Boolean existsIgnoreCase = cosmosTemplate.exists(queryIgnoreCase, Person.class, containerName);
assertThat(existsIgnoreCase).isTrue();
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
// ARRAY_CONTAINS on the hobbies array matches the seeded entity whose list holds HOBBY1.
@Test
public void testArrayContainsCriteria() {
Criteria hasHobby = Criteria.getInstance(CriteriaType.ARRAY_CONTAINS, "hobbies",
Collections.singletonList(HOBBY1), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasHobby), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// CONTAINING on firstName: case-sensitive "first" excludes the literal
// "NEW_FIRST_NAME" entity (upper case), while ignore-case includes it.
// Fix: local variable renamed TEST_PERSON_4 -> testPerson4; UPPER_SNAKE_CASE is
// reserved for constants and the old name masqueraded as one.
@Test
public void testContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3);
    Criteria containsNotCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "firstName",
        Collections.singletonList("first"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_2, TEST_PERSON_3, testPerson4);
}
// CONTAINING on id: each digit substring ("1", "2", "3") selects exactly the matching
// seeded entity (ids ID_1..ID_3).
@Test
public void testContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria containsCaseSensitive = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
Criteria containsCaseSensitive2 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON_2);
Criteria containsCaseSensitive3 = Criteria.getInstance(CriteriaType.CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(containsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON_3);
}
// NOT_CONTAINING on firstName: case-sensitive "li" excludes only the seeded
// "First_Name" entity; ignore-case "new" excludes all NEW_* variants.
// Fix: local variable renamed TEST_PERSON_4 -> testPerson4 (UPPER_SNAKE_CASE is for
// constants; the old name masqueraded as one).
@Test
public void testNotContainsCriteria() {
    cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
    cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
    Person testPerson4 = new Person("id-4", "NEW_FIRST_NAME", NEW_LAST_NAME, HOBBIES,
        ADDRESSES, AGE, PASSPORT_IDS_BY_COUNTRY);
    cosmosTemplate.insert(testPerson4, new PartitionKey(personInfo.getPartitionKeyFieldValue(testPerson4)));
    Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("li"), Part.IgnoreCaseType.NEVER);
    List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
        containerName));
    assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3, testPerson4);
    Criteria notContainsNotCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "firstName",
        Collections.singletonList("new"), Part.IgnoreCaseType.ALWAYS);
    List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsNotCaseSensitive), Person.class,
        containerName));
    assertThat(people2).containsExactly(TEST_PERSON);
}
// NOT_CONTAINING on id: excluding each digit substring leaves the other two entities.
@Test
public void testNotContainsCriteria2() {
cosmosTemplate.insert(TEST_PERSON_2, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
cosmosTemplate.insert(TEST_PERSON_3, new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_3)));
Criteria notContainsCaseSensitive = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("1"), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON_2, TEST_PERSON_3);
Criteria notContainsCaseSensitive2 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("2"), Part.IgnoreCaseType.NEVER);
List<Person> people2 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive2), Person.class,
containerName));
assertThat(people2).containsExactly(TEST_PERSON, TEST_PERSON_3);
Criteria notContainsCaseSensitive3 = Criteria.getInstance(CriteriaType.NOT_CONTAINING, "id",
Collections.singletonList("3"), Part.IgnoreCaseType.NEVER);
List<Person> people3 = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(notContainsCaseSensitive3), Person.class,
containerName));
assertThat(people3).containsExactly(TEST_PERSON, TEST_PERSON_2);
}
// IS_NOT_NULL on lastName matches the seeded entity.
// NOTE(review): method name says "CaseSensitive" but the criteria uses
// IgnoreCaseType.ALWAYS (case-insensitive) — confirm intent; case is irrelevant
// for a null check anyway.
@Test
public void testIsNotNullCriteriaCaseSensitive() {
Criteria hasLastName = Criteria.getInstance(CriteriaType.IS_NOT_NULL, "lastName",
Collections.emptyList(),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(hasLastName), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// STARTS_WITH with IgnoreCaseType.ALWAYS matches against the upper-cased prefix.
// NOTE(review): method name says "CaseSensitive" but the behavior exercised is
// case-insensitive — confirm the intended naming.
@Test
public void testStartsWithCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STARTS_WITH, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// IS_EQUAL with IgnoreCaseType.ALWAYS matches the upper-cased probe value.
// NOTE(review): method name says "CaseSensitive" but the behavior exercised is
// case-insensitive — confirm the intended naming.
@Test
public void testIsEqualCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// STRING_EQUALS with IgnoreCaseType.ALWAYS matches the upper-cased probe value.
// NOTE(review): method name says "CaseSensitive" but the behavior exercised is
// case-insensitive — confirm the intended naming.
@Test
public void testStringEqualsCriteriaCaseSensitive() {
Criteria nameStartsWith = Criteria.getInstance(CriteriaType.STRING_EQUALS, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName().toUpperCase()),
Part.IgnoreCaseType.ALWAYS);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(nameStartsWith), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// BETWEEN on age with bounds [AGE-1, AGE+1] includes the seeded entity.
@Test
public void testBetweenCriteria() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(ageBetween), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// IS_EQUAL against a bracketed nested path (array index + quoted property) is translated
// correctly and matches the seeded entity's first shipping address postal code.
@Test
public void testFindWithEqualCriteriaContainingNestedProperty() {
String postalCode = ADDRESSES.get(0).getPostalCode();
String subjectWithNestedProperty = "shippingAddresses[0]['postalCode']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithNestedProperty,
Collections.singletonList(postalCode), Part.IgnoreCaseType.NEVER);
List<Person> people = TestUtils.toList(cosmosTemplate.find(new CosmosQuery(criteria), Person.class,
containerName));
assertThat(people).containsExactly(TEST_PERSON);
}
// runQuery handles a generated SqlQuerySpec whose subject is a bracketed map key
// containing spaces.
@Test
public void testRunQueryWithEqualCriteriaContainingSpaces() {
String usaPassportId = PASSPORT_IDS_BY_COUNTRY.get("United States of America");
String subjectWithSpaces = "passportIdsByCountry['United States of America']";
Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, subjectWithSpaces,
Collections.singletonList(usaPassportId), Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testRunQueryWithSimpleReturnType() {
Criteria ageBetween = Criteria.getInstance(CriteriaType.BETWEEN, "age", Arrays.asList(AGE - 1, AGE + 1),
Part.IgnoreCaseType.NEVER);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(ageBetween));
List<Person> people = TestUtils.toList(cosmosTemplate.runQuery(sqlQuerySpec, Person.class, Person.class));
assertThat(people).containsExactly(TEST_PERSON);
}
@Test
public void testSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final CosmosQuery query = new CosmosQuery(criteria).with(pageRequest);
final Slice<Person> slice = cosmosTemplate.sliceQuery(query, Person.class, containerName);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void testRunSliceQuery() {
cosmosTemplate.insert(TEST_PERSON_2,
new PartitionKey(personInfo.getPartitionKeyFieldValue(TEST_PERSON_2)));
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNull();
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(FIRST_NAME), Part.IgnoreCaseType.NEVER);
final PageRequest pageRequest = new CosmosPageRequest(0, PAGE_SIZE_2, null);
final SqlQuerySpec sqlQuerySpec = new FindQuerySpecGenerator().generateCosmos(new CosmosQuery(criteria));
final Slice<Person> slice = cosmosTemplate.runSliceQuery(sqlQuerySpec, pageRequest, Person.class, Person.class);
assertThat(slice.getContent().size()).isEqualTo(1);
assertThat(responseDiagnosticsTestUtils.getCosmosDiagnostics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics()).isNotNull();
assertThat(responseDiagnosticsTestUtils.getCosmosResponseStatistics().getRequestCharge()).isGreaterThan(0);
}
@Test
public void createWithAutoscale() throws ClassNotFoundException {
final CosmosEntityInformation<AutoScaleSample, String> autoScaleSampleInfo =
new CosmosEntityInformation<>(AutoScaleSample.class);
CosmosContainerProperties containerProperties = cosmosTemplate.createContainerIfNotExists(autoScaleSampleInfo);
assertNotNull(containerProperties);
ThroughputResponse throughput = client.getDatabase(TestConstants.DB_NAME)
.getContainer(autoScaleSampleInfo.getContainerName())
.readThroughput()
.block();
assertNotNull(throughput);
assertEquals(Integer.parseInt(TestConstants.AUTOSCALE_MAX_THROUGHPUT),
throughput.getProperties().getAutoscaleMaxThroughput());
}
@Test
public void createDatabaseWithThroughput() throws ClassNotFoundException {
final String configuredThroughputDbName = TestConstants.DB_NAME + "-configured-throughput";
deleteDatabaseIfExists(configuredThroughputDbName);
Integer expectedRequestUnits = 700;
final CosmosConfig config = CosmosConfig.builder()
.enableDatabaseThroughput(false, expectedRequestUnits)
.build();
final CosmosTemplate configuredThroughputCosmosTemplate = createCosmosTemplate(config, configuredThroughputDbName);
final CosmosEntityInformation<Person, String> personInfo =
new CosmosEntityInformation<>(Person.class);
configuredThroughputCosmosTemplate.createContainerIfNotExists(personInfo);
final CosmosAsyncDatabase database = client.getDatabase(configuredThroughputDbName);
final ThroughputResponse response = database.readThroughput().block();
assertEquals(expectedRequestUnits, response.getProperties().getManualThroughput());
}
@Test
public void queryWithMaxDegreeOfParallelism() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxDegreeOfParallelism(20)
.build();
final CosmosTemplate maxDegreeOfParallelismCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxDegreeOfParallelismCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxDegreeOfParallelismCosmosTemplate, "maxDegreeOfParallelism"), 20);
}
@Test
public void queryWithMaxBufferedItemCount() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.maxBufferedItemCount(500)
.build();
final CosmosTemplate maxBufferedItemCountCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = maxBufferedItemCountCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(maxBufferedItemCountCosmosTemplate, "maxBufferedItemCount"), 500);
}
@Test
public void queryWithResponseContinuationTokenLimitInKb() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.responseContinuationTokenLimitInKb(2000)
.build();
final CosmosTemplate responseContinuationTokenLimitInKbCosmosTemplate =
createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = responseContinuationTokenLimitInKbCosmosTemplate.count(query, containerName);
assertEquals((int) ReflectionTestUtils.getField(responseContinuationTokenLimitInKbCosmosTemplate,
"responseContinuationTokenLimitInKb"), 2000);
}
@Test
public void queryDatabaseWithQueryMerticsEnabled() throws ClassNotFoundException {
final CosmosConfig config = CosmosConfig.builder()
.enableQueryMetrics(true)
.build();
final CosmosTemplate queryMetricsEnabledCosmosTemplate = createCosmosTemplate(config, TestConstants.DB_NAME);
final Criteria criteria = Criteria.getInstance(CriteriaType.IS_EQUAL, "firstName",
Collections.singletonList(TEST_PERSON.getFirstName()), Part.IgnoreCaseType.NEVER);
final CosmosQuery query = new CosmosQuery(criteria);
final long count = queryMetricsEnabledCosmosTemplate.count(query, containerName);
assertEquals((boolean) ReflectionTestUtils.getField(queryMetricsEnabledCosmosTemplate, "queryMetricsEnabled"), true);
}
@Test
public void userAgentSpringDataCosmosSuffix() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Method getUserAgentSuffix = CosmosClientBuilder.class.getDeclaredMethod("getUserAgentSuffix");
getUserAgentSuffix.setAccessible(true);
String userAgentSuffix = (String) getUserAgentSuffix.invoke(cosmosClientBuilder);
assertThat(userAgentSuffix).contains(Constants.USER_AGENT_SUFFIX);
assertThat(userAgentSuffix).contains(PropertyLoader.getProjectVersion());
}
private void deleteDatabaseIfExists(String dbName) {
CosmosAsyncDatabase database = client.getDatabase(dbName);
try {
database.delete().block();
} catch (CosmosException ex) {
assertEquals(ex.getStatusCode(), 404);
}
}
} |
I thought OS X used `tar.gz` as well | public String getExtension() {
return extension;
} | return extension; | public String getExtension() {
return extension;
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else {
platform = "osx";
extension = "zip";
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform x86");
}
}
public String getPlatform() {
return platform;
}
public String getArchitecture() {
return architecture;
}
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else if (osName.contains("mac os x")) {
platform = "osx";
extension = "zip";
} else {
throw new RuntimeException("unexpected osName " + osName);
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64") || arch.contains("x86_64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform " + arch);
}
}
public String getPlatform() {
return platform;
}
public String getArchitecture() {
return architecture;
}
} |
Would using linux as the default be safer than macOs? Changing this else if for macOs and the else for Linux | public String getArchitecture() {
return architecture;
} | return architecture; | public String getArchitecture() {
return architecture;
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else {
platform = "osx";
extension = "zip";
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform x86");
}
}
public String getPlatform() {
return platform;
}
public String getExtension() {
return extension;
}
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else if (osName.contains("mac os x")) {
platform = "osx";
extension = "zip";
} else {
throw new RuntimeException("unexpected osName " + osName);
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64") || arch.contains("x86_64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform " + arch);
}
}
public String getPlatform() {
return platform;
}
public String getExtension() {
return extension;
}
} |
In this case the [release](https://github.com/Azure/azure-sdk-tools/releases/tag/test-proxy_1.0.0-dev.20221212.2) for the test proxy is a zip file on Mac. | public String getExtension() {
return extension;
} | return extension; | public String getExtension() {
return extension;
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else {
platform = "osx";
extension = "zip";
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform x86");
}
}
public String getPlatform() {
return platform;
}
public String getArchitecture() {
return architecture;
}
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else if (osName.contains("mac os x")) {
platform = "osx";
extension = "zip";
} else {
throw new RuntimeException("unexpected osName " + osName);
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64") || arch.contains("x86_64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform " + arch);
}
}
public String getPlatform() {
return platform;
}
public String getArchitecture() {
return architecture;
}
} |
Good callout. I just hadn't grabbed my Mac from the other room the day I wrote this to validate what the `osName` is. :) Updated this to not have a default, which is what I really want. | public String getArchitecture() {
return architecture;
} | return architecture; | public String getArchitecture() {
return architecture;
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else {
platform = "osx";
extension = "zip";
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform x86");
}
}
public String getPlatform() {
return platform;
}
public String getExtension() {
return extension;
}
} | class PlatformInfo {
private final String platform;
private final String extension;
private final String architecture;
PlatformInfo() {
String osName = System.getProperty("os.name").toLowerCase(Locale.ROOT);
if (osName.contains("windows")) {
platform = "win";
extension = "zip";
} else if (osName.contains("linux")) {
platform = "linux";
extension = "tar.gz";
} else if (osName.contains("mac os x")) {
platform = "osx";
extension = "zip";
} else {
throw new RuntimeException("unexpected osName " + osName);
}
String arch = System.getProperty("os.arch").toLowerCase(Locale.ROOT);
if (arch.contains("amd64") || arch.contains("x86_64")) {
architecture = "x64";
} else if (arch.contains("arm64")) {
architecture = "arm64";
} else {
throw new RuntimeException("Unsupported platform " + arch);
}
}
public String getPlatform() {
return platform;
}
public String getExtension() {
return extension;
}
} |
Will this request for one `.json` record file at a time? Is there a way to download all `.json` files for a particular library with a single request? | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
} | .setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile)); | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
Use `UncheckedIOException` instead for `RuntimeException` when wrapping IOException. | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
} | throw new RuntimeException(e); | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
Is this recordingId per test recording file? Since this the HttpClient, we should not make this test specific to allow for this client to be reusable and not instantiate this multiple times. | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
} | xRecordingId = response.getHeaderValue("x-recording-id"); | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
What does `/playback/stop` request do since `playback/start` would have returned the full recorded file? | public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
} | client.sendSync(request, Context.NONE); | public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Stops playback of a test recording.
*/
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Stops playback of a test recording.
*/
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
Everything will get cloned locally at once. After that, this is just the key to tell it which file to read. | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
} | .setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile)); | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
The recordingId is per test. It's the key used so the test proxy can keep track of more than one recording at a time. We are re-using the same client for all requests, and just making new request/responses per test. I think that's reasonable? | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
} | xRecordingId = response.getHeaderValue("x-recording-id"); | public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
/**
* Stops playback of a test recording.
*/
public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
}
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
That's not how the proxy works. The file isn't returned over the wire. Individual requests are answered. `playback/start` signals that we are going to start sending requests that should match a playback file. Then the test sends requests as it would in a live test and the proxy responds with what is in the file. `playback/stop` is so the proxy can free things for that test when it is done. | public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
} | client.sendSync(request, Context.NONE); | public void stopPlayback() {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/stop", TestProxyUtils.getProxyUrl()))
.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private static final List<TestProxyMatcher> DEFAULT_MATCHERS = loadMatchers();
private final List<TestProxyMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*
* @param customSanitizers the list of custom sanitizers to be added to {@link TestProxyPlaybackClient}
*/
public TestProxyPlaybackClient(List<TestProxySanitizer> customSanitizers, List<TestProxyMatcher> customMatcher) {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
this.sanitizers.addAll(customSanitizers == null ? Collections.emptyList() : customSanitizers);
this.matchers.addAll(DEFAULT_MATCHERS);
this.matchers.addAll(customMatcher == null ? Collections.emptyList() : customMatcher);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws RuntimeException if an {@link IOException} is thrown.
*/
public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization();
addMatcherRequests();
String body = response.getBodyAsString().block();
return SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet().stream().sorted(Comparator.comparingInt(e -> Integer.parseInt(e.getKey()))).map(Map.Entry::getValue).collect(Collectors.toCollection(LinkedList::new));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Stops playback of a test recording.
*/
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request);
}
private void addProxySanitization() {
getSanitizerRequests(this.sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
private void addMatcherRequests() {
getMatcherRequests(this.matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
}
} | class TestProxyPlaybackClient implements HttpClient {
private final HttpURLConnectionHttpClient client = new HttpURLConnectionHttpClient();
private String xRecordingId;
private static final SerializerAdapter SERIALIZER = new JacksonAdapter();
private static final List<TestProxySanitizer> DEFAULT_SANITIZERS = loadSanitizers();
private final List<TestProxySanitizer> sanitizers = new ArrayList<>();
private final List<TestProxyRequestMatcher> matchers = new ArrayList<>();
/**
* Create an instance of {@link TestProxyPlaybackClient} with a list of custom sanitizers.
*/
public TestProxyPlaybackClient() {
this.sanitizers.addAll(DEFAULT_SANITIZERS);
}
/**
* Starts playback of a test recording.
* @param recordFile The name of the file to read.
* @return A {@link Queue} representing the variables in the recording.
* @throws UncheckedIOException if an {@link IOException} is thrown.
*/
public Queue<String> startPlayback(String recordFile) {
HttpRequest request = new HttpRequest(HttpMethod.POST, String.format("%s/playback/start", TestProxyUtils.getProxyUrl()))
.setBody(String.format("{\"x-recording-file\": \"%s\"}", recordFile));
try (HttpResponse response = client.sendSync(request, Context.NONE)) {
checkForTestProxyErrors(response);
xRecordingId = response.getHeaderValue("x-recording-id");
addProxySanitization(this.sanitizers);
addMatcherRequests(this.matchers);
String body = response.getBodyAsString().block();
List<Map.Entry<String, String>> toSort;
toSort = new ArrayList<>(SERIALIZER.<Map<String, String>>deserialize(body, Map.class, SerializerEncoding.JSON).entrySet());
toSort.sort(Comparator.comparingInt(e -> Integer.parseInt(e.getKey())));
LinkedList<String> strings = new LinkedList<>();
for (Map.Entry<String, String> stringStringEntry : toSort) {
String value = stringStringEntry.getValue();
strings.add(value);
}
return strings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Stops playback of a test recording.
*/
/**
* Redirects the request to the test-proxy to retrieve the playback response.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public Mono<HttpResponse> send(HttpRequest request) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
return client.send(request).map(response -> {
TestProxyUtils.checkForTestProxyErrors(response);
return response;
});
}
/**
* Redirects the request to the test-proxy to retrieve the playback response synchronously.
* @param request The HTTP request to send.
* @return The HTTP response.
*/
@Override
public HttpResponse sendSync(HttpRequest request, Context context) {
if (xRecordingId == null) {
throw new RuntimeException("Playback was not started before a request was sent.");
}
TestProxyUtils.changeHeaders(request, xRecordingId, "playback");
HttpResponse response = client.sendSync(request, context);
TestProxyUtils.checkForTestProxyErrors(response);
return response;
}
/**
* Add a list of {@link TestProxySanitizer} to the current playback session.
* @param sanitizers The sanitizers to add.
*/
public void addProxySanitization(List<TestProxySanitizer> sanitizers) {
if (isPlayingBack()) {
getSanitizerRequests(sanitizers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.sanitizers.addAll(sanitizers);
}
}
/**
* Add a list of {@link TestProxyRequestMatcher} to the current playback session.
* @param matchers The matchers to add.
*/
public void addMatcherRequests(List<TestProxyRequestMatcher> matchers) {
if (isPlayingBack()) {
getMatcherRequests(matchers)
.forEach(request -> {
request.setHeader("x-recording-id", xRecordingId);
client.sendSync(request, Context.NONE);
});
} else {
this.matchers.addAll(matchers);
}
}
private boolean isPlayingBack() {
return xRecordingId != null;
}
} |
It will, though. This is part of what makes existing tests continue to work. | public HttpPipelinePolicy getRecordPolicy() {
if (enableTestProxy) {
return startProxyRecording();
}
return getRecordPolicy(Collections.emptyList());
} | return startProxyRecording(); | public HttpPipelinePolicy getRecordPolicy() {
if (testProxyEnabled) {
return getProxyRecordingPolicy();
}
return getRecordPolicy(Collections.emptyList());
} | class InterceptorManager implements AutoCloseable {
private static final String RECORD_FOLDER = "session-records/";
private static final ObjectMapper RECORD_MAPPER = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
private static final ClientLogger LOGGER = new ClientLogger(InterceptorManager.class);
private final Map<String, String> textReplacementRules;
private final String testName;
private final String playbackRecordName;
private final TestMode testMode;
private final boolean allowedToReadRecordedValues;
private final boolean allowedToRecordValues;
private final RecordedData recordedData;
private final boolean enableTestProxy;
private TestProxyRecordPolicy testProxyRecordPolicy;
private TestProxyPlaybackClient testProxyPlaybackClient;
private final Queue<String> proxyVariableQueue = new LinkedList<>();
private List<TestProxySanitizer> recordSanitizers;
private List<TestProxyMatcher> customMatcher;
/**
* Creates a new InterceptorManager that either replays test-session records or saves them.
*
* <ul>
* <li>If {@code testMode} is {@link TestMode
* record to read network calls from.</li>
* <li>If {@code testMode} is {@link TestMode
* all the network calls to it.</li>
* </ul>
*
* The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param testMode The {@link TestMode} for this interceptor.
* @throws UncheckedIOException If {@code testMode} is {@link TestMode
* could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, TestMode testMode) {
this(testName, testName, testMode, false, false);
}
/**
* Creates a new InterceptorManager that either replays test-session records or saves them.
*
* <ul>
* <li>If {@code testMode} is {@link TestMode
* record to read network calls from.</li>
* <li>If {@code testMode} is {@link TestMode
* all the network calls to it.</li>
* <li>If {@code testMode} is {@link TestMode
* record.</li>
* </ul>
*
* The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
*
* @param testContextManager Contextual information about the test being ran, such as test name, {@link TestMode},
* and others.
* @throws UncheckedIOException If {@code testMode} is {@link TestMode
* could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} is {@code null}.
*/
public InterceptorManager(TestContextManager testContextManager) {
this(testContextManager.getTestName(), testContextManager.getTestPlaybackRecordingName(),
testContextManager.getTestMode(), testContextManager.doNotRecordTest(), testContextManager.getEnableTestProxy());
}
private InterceptorManager(String testName, String playbackRecordName, TestMode testMode, boolean doNotRecord, boolean enableTestProxy) {
this.enableTestProxy = enableTestProxy;
Objects.requireNonNull(testName, "'testName' cannot be null.");
this.testName = testName;
this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
this.testMode = testMode;
this.textReplacementRules = new HashMap<>();
this.allowedToReadRecordedValues = (testMode == TestMode.PLAYBACK && !doNotRecord);
this.allowedToRecordValues = (testMode == TestMode.RECORD && !doNotRecord);
if (!enableTestProxy && allowedToReadRecordedValues) {
this.recordedData = readDataFromFile();
} else if (!enableTestProxy && allowedToRecordValues) {
this.recordedData = new RecordedData();
} else {
this.recordedData = null;
}
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, Map<String, String> textReplacementRules) {
this(testName, textReplacementRules, false, testName);
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @param doNotRecord Flag indicating whether network calls should be record or played back.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord) {
this(testName, textReplacementRules, doNotRecord, testName);
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @param doNotRecord Flag indicating whether network calls should be record or played back.
* @param playbackRecordName Full name of the test including its iteration, used as the playback record name.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
*/
public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord,
String playbackRecordName) {
Objects.requireNonNull(testName, "'testName' cannot be null.");
Objects.requireNonNull(textReplacementRules, "'textReplacementRules' cannot be null.");
this.testName = testName;
this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
this.testMode = TestMode.PLAYBACK;
this.allowedToReadRecordedValues = !doNotRecord;
this.allowedToRecordValues = false;
this.enableTestProxy = false;
this.recordedData = allowedToReadRecordedValues ? readDataFromFile() : null;
this.textReplacementRules = textReplacementRules;
}
/**
* Gets whether this InterceptorManager is in playback mode.
*
* @return true if the InterceptorManager is in playback mode and false otherwise.
*/
public boolean isPlaybackMode() {
return testMode == TestMode.PLAYBACK;
}
/**
* Gets whether this InterceptorManager is in live mode.
*
* @return true if the InterceptorManager is in live mode and false otherwise.
*/
public boolean isLiveMode() {
return testMode == TestMode.LIVE;
}
/**
* Gets the recorded data InterceptorManager is keeping track of.
*
* @return The recorded data managed by InterceptorManager.
*/
public RecordedData getRecordedData() {
return recordedData;
}
/**
* A {@link Supplier} for retrieving a variable from a test proxy recording.
* @return The supplier for retrieving a variable.
*/
public Supplier<String> getProxyVariableSupplier() {
return () -> {
Objects.requireNonNull(this.testProxyPlaybackClient, "Playback must be started to retrieve values");
return proxyVariableQueue.remove();
};
}
/**
* Get a {@link Consumer} for adding variables used in test proxy tests.
* @return The consumer for adding a variable.
*/
public Consumer<String> getProxyVariableConsumer() {
return proxyVariableQueue::add;
}
/**
* Gets a new HTTP pipeline policy that records network calls and its data is managed by
* {@link InterceptorManager}.
*
* @return HttpPipelinePolicy to record network calls.
*/
/**
* Gets a new HTTP pipeline policy that records network calls. The recorded content is redacted by the given list of
* redactor functions to hide sensitive information.
*
* @param recordingRedactors The custom redactor functions that are applied in addition to the default redactor
* functions defined in {@link RecordingRedactor}.
* @return {@link HttpPipelinePolicy} to record network calls.
*/
public HttpPipelinePolicy getRecordPolicy(List<Function<String, String>> recordingRedactors) {
if (enableTestProxy) {
proxyVariableQueue.clear();
return startProxyRecording();
}
return new RecordNetworkCallPolicy(recordedData, recordingRedactors);
}
/**
* Gets a new HTTP client that plays back test session records managed by {@link InterceptorManager}.
*
* @return An HTTP client that plays back network calls from its recorded data.
*/
public HttpClient getPlaybackClient() {
if (enableTestProxy) {
testProxyPlaybackClient = new TestProxyPlaybackClient(this.recordSanitizers, this.customMatcher);
proxyVariableQueue.addAll(testProxyPlaybackClient.startPlayback(playbackRecordName));
return testProxyPlaybackClient;
} else {
return new PlaybackClient(recordedData, textReplacementRules);
}
}
/**
* Disposes of resources used by this InterceptorManager.
*
* If {@code testMode} is {@link TestMode
* "<i>session-records/{@code testName}.json</i>"
*/
@Override
public void close() {
if (allowedToRecordValues) {
if (enableTestProxy) {
testProxyRecordPolicy.stopRecording(proxyVariableQueue);
} else {
try (BufferedWriter writer = Files.newBufferedWriter(createRecordFile(playbackRecordName).toPath())) {
RECORD_MAPPER.writeValue(writer, recordedData);
} catch (IOException ex) {
throw LOGGER.logExceptionAsError(
new UncheckedIOException("Unable to write data to playback file.", ex));
}
}
} else if (isPlaybackMode() && enableTestProxy) {
testProxyPlaybackClient.stopPlayback();
}
}
private RecordedData readDataFromFile() {
File recordFile = getRecordFile();
try (BufferedReader reader = Files.newBufferedReader(recordFile.toPath())) {
return RECORD_MAPPER.readValue(reader, RecordedData.class);
} catch (IOException ex) {
throw LOGGER.logExceptionAsWarning(new UncheckedIOException(ex));
}
}
/**
* Get the {@link File} pointing to the folder where session records live.
* @return The session-records folder.
* @throws IllegalStateException if the session-records folder cannot be found.
*/
public static File getRecordFolder() {
URL folderUrl = InterceptorManager.class.getClassLoader().getResource(RECORD_FOLDER);
if (folderUrl != null) {
return new File(toURI(folderUrl, LOGGER));
}
throw new IllegalStateException("Unable to locate session-records folder. Please create a session-records "
+ "folder in '/src/test/resources' of the module (ex. for azure-core-test this is "
+ "'/sdk/core/azure-core-test/src/test/resources/session-records').");
}
private static URI toURI(URL url, ClientLogger logger) {
try {
return url.toURI();
} catch (URISyntaxException ex) {
throw logger.logExceptionAsError(new IllegalStateException(ex));
}
}
private HttpPipelinePolicy startProxyRecording() {
this.testProxyRecordPolicy = new TestProxyRecordPolicy(this.recordSanitizers);
testProxyRecordPolicy.startRecording(playbackRecordName);
return testProxyRecordPolicy;
}
/*
* Attempts to retrieve the playback file, if it is not found an exception is thrown as playback can't continue.
*/
private File getRecordFile() {
File recordFolder = getRecordFolder();
File playbackFile = new File(recordFolder, playbackRecordName + ".json");
File oldPlaybackFile = new File(recordFolder, testName + ".json");
if (!playbackFile.exists() && !oldPlaybackFile.exists()) {
throw LOGGER.logExceptionAsError(new RuntimeException(String.format(
"Missing both new and old playback files. Files are %s and %s.", playbackFile.getPath(),
oldPlaybackFile.getPath())));
}
if (playbackFile.exists()) {
LOGGER.info("==> Playback file path: {}", playbackFile.getPath());
return playbackFile;
} else {
LOGGER.info("==> Playback file path: {}", oldPlaybackFile.getPath());
return oldPlaybackFile;
}
}
/*
* Retrieves or creates the file that will be used to store the recorded test values.
*/
private File createRecordFile(String testName) throws IOException {
File recordFolder = getRecordFolder();
if (!recordFolder.exists()) {
if (recordFolder.mkdir()) {
LOGGER.verbose("Created directory: {}", recordFolder.getPath());
}
}
File recordFile = new File(recordFolder, testName + ".json");
if (recordFile.createNewFile()) {
LOGGER.verbose("Created record file: {}", recordFile.getPath());
}
LOGGER.info("==> Playback file path: " + recordFile);
return recordFile;
}
/**
* Add text replacement rule (regex as key, the replacement text as value) into
* {@link InterceptorManager
*
* @param regex the pattern to locate the position of replacement
* @param replacement the replacement text
*/
public void addTextReplacementRule(String regex, String replacement) {
textReplacementRules.put(regex, replacement);
}
/**
* Add text replacement rule (regex as key, the replacement text as value) into {@code recordSanitizers}
* @param testProxySanitizers the list of replacement regex and rules.
*/
public void addSanitizers(List<TestProxySanitizer> testProxySanitizers) {
this.recordSanitizers = testProxySanitizers;
}
/**
* Add matcher rules to match recorded data in playback.
* @param testProxyMatchers the list of matcher rules when playing back recorded data.
*/
public void addMatchers(List<TestProxyMatcher> testProxyMatchers) {
this.customMatcher = testProxyMatchers;
}
} | class InterceptorManager implements AutoCloseable {
private static final ObjectMapper RECORD_MAPPER = new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
private static final ClientLogger LOGGER = new ClientLogger(InterceptorManager.class);
private final Map<String, String> textReplacementRules;
private final String testName;
private final String playbackRecordName;
private final TestMode testMode;
private final boolean allowedToReadRecordedValues;
private final boolean allowedToRecordValues;
private final RecordedData recordedData;
private final boolean testProxyEnabled;
private TestProxyRecordPolicy testProxyRecordPolicy;
private TestProxyPlaybackClient testProxyPlaybackClient;
private final Queue<String> proxyVariableQueue = new LinkedList<>();
/**
* Creates a new InterceptorManager that either replays test-session records or saves them.
*
* <ul>
* <li>If {@code testMode} is {@link TestMode
* record to read network calls from.</li>
* <li>If {@code testMode} is {@link TestMode
* all the network calls to it.</li>
* </ul>
*
* The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param testMode The {@link TestMode} for this interceptor.
* @throws UncheckedIOException If {@code testMode} is {@link TestMode
* could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, TestMode testMode) {
this(testName, testName, testMode, false, false);
}
/**
* Creates a new InterceptorManager that either replays test-session records or saves them.
*
* <ul>
* <li>If {@code testMode} is {@link TestMode
* record to read network calls from.</li>
* <li>If {@code testMode} is {@link TestMode
* all the network calls to it.</li>
* <li>If {@code testMode} is {@link TestMode
* record.</li>
* </ul>
*
* The test session records are persisted in the path: "<i>session-records/{@code testName}.json</i>"
*
* @param testContextManager Contextual information about the test being ran, such as test name, {@link TestMode},
* and others.
* @throws UncheckedIOException If {@code testMode} is {@link TestMode
* could not be located or the data could not be deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} is {@code null}.
*/
public InterceptorManager(TestContextManager testContextManager) {
this(testContextManager.getTestName(), testContextManager.getTestPlaybackRecordingName(),
testContextManager.getTestMode(), testContextManager.doNotRecordTest(), testContextManager.isTestProxyEnabled());
}
private InterceptorManager(String testName, String playbackRecordName, TestMode testMode, boolean doNotRecord, boolean enableTestProxy) {
this.testProxyEnabled = enableTestProxy;
Objects.requireNonNull(testName, "'testName' cannot be null.");
this.testName = testName;
this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
this.testMode = testMode;
this.textReplacementRules = new HashMap<>();
this.allowedToReadRecordedValues = (testMode == TestMode.PLAYBACK && !doNotRecord);
this.allowedToRecordValues = (testMode == TestMode.RECORD && !doNotRecord);
if (!enableTestProxy && allowedToReadRecordedValues) {
this.recordedData = readDataFromFile();
} else if (!enableTestProxy && allowedToRecordValues) {
this.recordedData = new RecordedData();
} else {
this.recordedData = null;
}
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, Map<String, String> textReplacementRules) {
this(testName, textReplacementRules, false, testName);
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test session record.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @param doNotRecord Flag indicating whether network calls should be record or played back.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
* @deprecated Use {@link
*/
@Deprecated
public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord) {
this(testName, textReplacementRules, doNotRecord, testName);
}
/**
* Creates a new InterceptorManager that replays test session records. It takes a set of
* {@code textReplacementRules}, that can be used by {@link PlaybackClient} to replace values in a
* {@link NetworkCallRecord
*
* The test session records are read from: "<i>session-records/{@code testName}.json</i>"
*
* @param testName Name of the test.
* @param textReplacementRules A set of rules to replace text in {@link NetworkCallRecord
* playing back network calls.
* @param doNotRecord Flag indicating whether network calls should be record or played back.
* @param playbackRecordName Full name of the test including its iteration, used as the playback record name.
* @throws UncheckedIOException An existing test session record could not be located or the data could not be
* deserialized into an instance of {@link RecordedData}.
* @throws NullPointerException If {@code testName} or {@code textReplacementRules} is {@code null}.
*/
public InterceptorManager(String testName, Map<String, String> textReplacementRules, boolean doNotRecord,
String playbackRecordName) {
Objects.requireNonNull(testName, "'testName' cannot be null.");
Objects.requireNonNull(textReplacementRules, "'textReplacementRules' cannot be null.");
this.testName = testName;
this.playbackRecordName = CoreUtils.isNullOrEmpty(playbackRecordName) ? testName : playbackRecordName;
this.testMode = TestMode.PLAYBACK;
this.allowedToReadRecordedValues = !doNotRecord;
this.allowedToRecordValues = false;
this.testProxyEnabled = false;
this.recordedData = allowedToReadRecordedValues ? readDataFromFile() : null;
this.textReplacementRules = textReplacementRules;
}
/**
* Gets whether this InterceptorManager is in playback mode.
*
* @return true if the InterceptorManager is in playback mode and false otherwise.
*/
public boolean isPlaybackMode() {
return testMode == TestMode.PLAYBACK;
}
/**
* Gets whether this InterceptorManager is in live mode.
*
* @return true if the InterceptorManager is in live mode and false otherwise.
*/
public boolean isLiveMode() {
return testMode == TestMode.LIVE;
}
/**
* Gets the recorded data InterceptorManager is keeping track of.
*
* @return The recorded data managed by InterceptorManager.
*/
public RecordedData getRecordedData() {
return recordedData;
}
/**
* A {@link Supplier} for retrieving a variable from a test proxy recording.
* @return The supplier for retrieving a variable.
*/
public Supplier<String> getProxyVariableSupplier() {
return () -> {
Objects.requireNonNull(this.testProxyPlaybackClient, "Playback must be started to retrieve values");
if (!CoreUtils.isNullOrEmpty(proxyVariableQueue)) {
return proxyVariableQueue.remove();
} else {
throw LOGGER.logExceptionAsError(new RuntimeException("'proxyVariableQueue' cannot be null or empty."));
}
};
}
/**
* Get a {@link Consumer} for adding variables used in test proxy tests.
* @return The consumer for adding a variable.
*/
public Consumer<String> getProxyVariableConsumer() {
return proxyVariableQueue::add;
}
/**
* Gets a new HTTP pipeline policy that records network calls and its data is managed by
* {@link InterceptorManager}.
*
* @return HttpPipelinePolicy to record network calls.
*/
/**
* Gets a new HTTP pipeline policy that records network calls. The recorded content is redacted by the given list of
* redactor functions to hide sensitive information.
*
* @param recordingRedactors The custom redactor functions that are applied in addition to the default redactor
* functions defined in {@link RecordingRedactor}.
* @return {@link HttpPipelinePolicy} to record network calls.
*/
public HttpPipelinePolicy getRecordPolicy(List<Function<String, String>> recordingRedactors) {
if (testProxyEnabled) {
return getProxyRecordingPolicy();
}
return new RecordNetworkCallPolicy(recordedData, recordingRedactors);
}
/**
* Gets a new HTTP client that plays back test session records managed by {@link InterceptorManager}.
*
* @return An HTTP client that plays back network calls from its recorded data.
*/
public HttpClient getPlaybackClient() {
if (testProxyEnabled) {
if (testProxyPlaybackClient == null) {
testProxyPlaybackClient = new TestProxyPlaybackClient();
proxyVariableQueue.addAll(testProxyPlaybackClient.startPlayback(playbackRecordName));
}
return testProxyPlaybackClient;
} else {
return new PlaybackClient(recordedData, textReplacementRules);
}
}
/**
* Disposes of resources used by this InterceptorManager.
*
* If {@code testMode} is {@link TestMode
* "<i>session-records/{@code testName}.json</i>"
*/
@Override
public void close() {
if (allowedToRecordValues) {
if (testProxyEnabled) {
testProxyRecordPolicy.stopRecording(proxyVariableQueue);
} else {
try (BufferedWriter writer = Files.newBufferedWriter(createRecordFile(playbackRecordName).toPath())) {
RECORD_MAPPER.writeValue(writer, recordedData);
} catch (IOException ex) {
throw LOGGER.logExceptionAsError(
new UncheckedIOException("Unable to write data to playback file.", ex));
}
}
} else if (isPlaybackMode() && testProxyEnabled && allowedToReadRecordedValues) {
testProxyPlaybackClient.stopPlayback();
}
}
private RecordedData readDataFromFile() {
File recordFile = getRecordFile();
try (BufferedReader reader = Files.newBufferedReader(recordFile.toPath())) {
return RECORD_MAPPER.readValue(reader, RecordedData.class);
} catch (IOException ex) {
throw LOGGER.logExceptionAsWarning(new UncheckedIOException(ex));
}
}
private HttpPipelinePolicy getProxyRecordingPolicy() {
if (testProxyRecordPolicy == null) {
testProxyRecordPolicy = new TestProxyRecordPolicy();
testProxyRecordPolicy.startRecording(playbackRecordName);
}
return testProxyRecordPolicy;
}
/*
* Attempts to retrieve the playback file, if it is not found an exception is thrown as playback can't continue.
*/
private File getRecordFile() {
File recordFolder = TestUtils.getRecordFolder();
File playbackFile = new File(recordFolder, playbackRecordName + ".json");
File oldPlaybackFile = new File(recordFolder, testName + ".json");
if (!playbackFile.exists() && !oldPlaybackFile.exists()) {
throw LOGGER.logExceptionAsError(new RuntimeException(String.format(
"Missing both new and old playback files. Files are %s and %s.", playbackFile.getPath(),
oldPlaybackFile.getPath())));
}
if (playbackFile.exists()) {
LOGGER.info("==> Playback file path: {}", playbackFile.getPath());
return playbackFile;
} else {
LOGGER.info("==> Playback file path: {}", oldPlaybackFile.getPath());
return oldPlaybackFile;
}
}
/*
* Retrieves or creates the file that will be used to store the recorded test values.
*/
private File createRecordFile(String testName) throws IOException {
File recordFolder = TestUtils.getRecordFolder();
if (!recordFolder.exists()) {
if (recordFolder.mkdir()) {
LOGGER.verbose("Created directory: {}", recordFolder.getPath());
}
}
File recordFile = new File(recordFolder, testName + ".json");
if (recordFile.createNewFile()) {
LOGGER.verbose("Created record file: {}", recordFile.getPath());
}
LOGGER.info("==> Playback file path: " + recordFile);
return recordFile;
}
/**
* Add text replacement rule (regex as key, the replacement text as value) into
* {@link InterceptorManager
*
* @param regex the pattern to locate the position of replacement
* @param replacement the replacement text
*/
public void addTextReplacementRule(String regex, String replacement) {
textReplacementRules.put(regex, replacement);
}
/**
* Add sanitizer rule for sanitization during record or playback.
* @param testProxySanitizers the list of replacement regex and rules.
* @throws RuntimeException Neither playback or record has started.
*/
public void addSanitizers(List<TestProxySanitizer> testProxySanitizers) {
if (testProxyPlaybackClient != null) {
testProxyPlaybackClient.addProxySanitization(testProxySanitizers);
} else if (testProxyRecordPolicy != null) {
testProxyRecordPolicy.addProxySanitization(testProxySanitizers);
} else {
throw new RuntimeException("Playback or record must have been started before adding sanitizers.");
}
}
/**
* Add matcher rules to match recorded data in playback.
* @param testProxyMatchers the list of matcher rules when playing back recorded data.
* @throws RuntimeException Playback has not started.
*/
public void addMatchers(List<TestProxyRequestMatcher> testProxyMatchers) {
if (testProxyPlaybackClient != null) {
testProxyPlaybackClient.addMatcherRequests(testProxyMatchers);
} else {
throw new RuntimeException("Playback must have been started before adding matchers.");
}
}
} |
We should add tests to ensure cancelled `Mono` no longer hangs. | public Mono<AccessToken> getToken() {
return Mono.defer(() -> {
try {
if (wip.compareAndSet(null, Sinks.one())) {
final Sinks.One<AccessToken> sinksOne = wip.get();
OffsetDateTime now = OffsetDateTime.now();
Mono<AccessToken> tokenRefresh;
Mono<AccessToken> fallback;
if (cache != null && !shouldRefresh.test(cache)) {
tokenRefresh = Mono.empty();
fallback = Mono.just(cache);
} else if (cache == null || cache.isExpired()) {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.defer(tokenSupplier)
.delaySubscription(Duration.between(now, nextTokenRefresh));
}
fallback = Mono.empty();
} else {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.empty();
}
fallback = Mono.just(cache);
}
return tokenRefresh
.materialize()
.flatMap(signal -> {
AccessToken accessToken = signal.get();
Throwable error = signal.getThrowable();
if (signal.isOnNext() && accessToken != null) {
LOGGER.log(LogLevel.INFORMATIONAL,
() -> refreshLog(cache, now, "Acquired a new access token", true));
cache = accessToken;
sinksOne.tryEmitValue(accessToken);
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return Mono.just(accessToken);
} else if (signal.isOnError() && error != null) {
LOGGER.log(LogLevel.ERROR,
() -> refreshLog(cache, now, "Failed to acquire a new access token", false));
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return fallback.switchIfEmpty(Mono.error(() -> error));
} else {
sinksOne.tryEmitEmpty();
return fallback;
}
})
.doOnError(sinksOne::tryEmitError)
.doFinally(ignored -> wip.set(null));
} else if (cache != null && !cache.isExpired()) {
return Mono.just(cache);
} else {
Sinks.One<AccessToken> sinksOne = wip.get();
if (sinksOne == null) {
return Mono.just(cache);
} else {
return sinksOne.asMono().switchIfEmpty(Mono.fromSupplier(() -> cache));
}
}
} catch (Exception t) {
return Mono.error(t);
}
});
} | .doFinally(ignored -> wip.set(null)); | public Mono<AccessToken> getToken() {
return Mono.defer(() -> {
try {
if (wip.compareAndSet(null, Sinks.one())) {
final Sinks.One<AccessToken> sinksOne = wip.get();
OffsetDateTime now = OffsetDateTime.now();
Mono<AccessToken> tokenRefresh;
Mono<AccessToken> fallback;
if (cache != null && !shouldRefresh.test(cache)) {
tokenRefresh = Mono.empty();
fallback = Mono.just(cache);
} else if (cache == null || cache.isExpired()) {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.defer(tokenSupplier)
.delaySubscription(Duration.between(now, nextTokenRefresh));
}
fallback = Mono.empty();
} else {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.empty();
}
fallback = Mono.just(cache);
}
return tokenRefresh
.materialize()
.flatMap(signal -> {
AccessToken accessToken = signal.get();
Throwable error = signal.getThrowable();
if (signal.isOnNext() && accessToken != null) {
LOGGER.log(LogLevel.INFORMATIONAL,
() -> refreshLog(cache, now, "Acquired a new access token", true));
cache = accessToken;
sinksOne.tryEmitValue(accessToken);
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return Mono.just(accessToken);
} else if (signal.isOnError() && error != null) {
LOGGER.log(LogLevel.ERROR,
() -> refreshLog(cache, now, "Failed to acquire a new access token", false));
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return fallback.switchIfEmpty(Mono.error(() -> error));
} else {
sinksOne.tryEmitEmpty();
return fallback;
}
})
.doOnError(sinksOne::tryEmitError)
.doFinally(ignored -> wip.set(null));
} else if (cache != null && !cache.isExpired()) {
return Mono.just(cache);
} else {
Sinks.One<AccessToken> sinksOne = wip.get();
if (sinksOne == null) {
return Mono.just(cache);
} else {
return sinksOne.asMono().switchIfEmpty(Mono.fromSupplier(() -> cache));
}
}
} catch (Exception t) {
return Mono.error(t);
}
});
} | class SimpleTokenCache {
private static final Duration REFRESH_DELAY = Duration.ofSeconds(30);
private static final String REFRESH_DELAY_STRING = String.valueOf(REFRESH_DELAY.getSeconds());
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final ClientLogger LOGGER = new ClientLogger(SimpleTokenCache.class);
private static final String NO_CACHE_ACQUIRED = "Acquired a new access token.";
private static final String NO_CACHE_FAILED = "Failed to acquire a new access token.";
private static final String NEGATIVE_TTE =
" seconds after expiry. Retry may be attempted after " + REFRESH_DELAY_STRING + " seconds.";
private static final String POSITIVE_TTE =
" seconds before expiry. Retry may be attempted after " + REFRESH_DELAY_STRING + " seconds.";
private final AtomicReference<Sinks.One<AccessToken>> wip;
private volatile AccessToken cache;
private volatile OffsetDateTime nextTokenRefresh = OffsetDateTime.now();
private final Supplier<Mono<AccessToken>> tokenSupplier;
private final Predicate<AccessToken> shouldRefresh;
/**
* Creates an instance of RefreshableTokenCredential with default scheme "Bearer".
*
* @param tokenSupplier a method to get a new token
*/
public SimpleTokenCache(Supplier<Mono<AccessToken>> tokenSupplier) {
this.wip = new AtomicReference<>();
this.tokenSupplier = tokenSupplier;
this.shouldRefresh = accessToken -> OffsetDateTime.now()
.isAfter(accessToken.getExpiresAt().minus(REFRESH_OFFSET));
}
/**
* Asynchronously get a token from either the cache or replenish the cache with a new token.
* @return a Publisher that emits an AccessToken
*/
private static String refreshLog(AccessToken cache, OffsetDateTime now, String log, boolean acquired) {
if (cache == null) {
return acquired ? NO_CACHE_ACQUIRED : NO_CACHE_FAILED;
}
Duration tte = Duration.between(now, cache.getExpiresAt());
StringBuilder info = new StringBuilder(log.length() + 128).append(log);
info.append(" at ").append(tte.abs().getSeconds())
.append(tte.isNegative() ? NEGATIVE_TTE : POSITIVE_TTE);
if (!tte.isNegative()) {
info.append(" The token currently cached will be used.");
}
return info.toString();
}
} | class SimpleTokenCache {
private static final Duration REFRESH_DELAY = Duration.ofSeconds(30);
private static final String REFRESH_DELAY_STRING = String.valueOf(REFRESH_DELAY.getSeconds());
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final ClientLogger LOGGER = new ClientLogger(SimpleTokenCache.class);
private static final String NO_CACHE_ACQUIRED = "Acquired a new access token.";
private static final String NO_CACHE_FAILED = "Failed to acquire a new access token.";
private static final String NEGATIVE_TTE = " seconds after expiry. Retry may be attempted after "
+ REFRESH_DELAY_STRING + " seconds.";
private static final String POSITIVE_TTE = " seconds before expiry. Retry may be attempted after "
+ REFRESH_DELAY_STRING + " seconds. The token currently cached will be used.";
private final AtomicReference<Sinks.One<AccessToken>> wip;
private volatile AccessToken cache;
private volatile OffsetDateTime nextTokenRefresh = OffsetDateTime.now();
private final Supplier<Mono<AccessToken>> tokenSupplier;
private final Predicate<AccessToken> shouldRefresh;
/**
* Creates an instance of RefreshableTokenCredential with default scheme "Bearer".
*
* @param tokenSupplier a method to get a new token
*/
public SimpleTokenCache(Supplier<Mono<AccessToken>> tokenSupplier) {
this.wip = new AtomicReference<>();
this.tokenSupplier = tokenSupplier;
this.shouldRefresh = accessToken -> OffsetDateTime.now()
.isAfter(accessToken.getExpiresAt().minus(REFRESH_OFFSET));
}
/**
* Asynchronously get a token from either the cache or replenish the cache with a new token.
* @return a Publisher that emits an AccessToken
*/
Sinks.One<AccessToken> getWipValue() {
return wip.get();
}
private static String refreshLog(AccessToken cache, OffsetDateTime now, String log, boolean acquired) {
if (cache == null) {
return acquired ? NO_CACHE_ACQUIRED : NO_CACHE_FAILED;
}
Duration tte = Duration.between(now, cache.getExpiresAt());
return log + " at " + tte.abs().getSeconds() + (tte.isNegative() ? NEGATIVE_TTE : POSITIVE_TTE);
}
} |
We can update POSITIVE_TTE to include this string. So, we don't need to check `!tte.isNegative()`. | private static String refreshLog(AccessToken cache, OffsetDateTime now, String log, boolean acquired) {
if (cache == null) {
return acquired ? NO_CACHE_ACQUIRED : NO_CACHE_FAILED;
}
Duration tte = Duration.between(now, cache.getExpiresAt());
StringBuilder info = new StringBuilder(log.length() + 128).append(log);
info.append(" at ").append(tte.abs().getSeconds())
.append(tte.isNegative() ? NEGATIVE_TTE : POSITIVE_TTE);
if (!tte.isNegative()) {
info.append(" The token currently cached will be used.");
}
return info.toString();
} | info.append(" The token currently cached will be used."); | private static String refreshLog(AccessToken cache, OffsetDateTime now, String log, boolean acquired) {
if (cache == null) {
return acquired ? NO_CACHE_ACQUIRED : NO_CACHE_FAILED;
}
Duration tte = Duration.between(now, cache.getExpiresAt());
return log + " at " + tte.abs().getSeconds() + (tte.isNegative() ? NEGATIVE_TTE : POSITIVE_TTE);
} | class SimpleTokenCache {
private static final Duration REFRESH_DELAY = Duration.ofSeconds(30);
private static final String REFRESH_DELAY_STRING = String.valueOf(REFRESH_DELAY.getSeconds());
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final ClientLogger LOGGER = new ClientLogger(SimpleTokenCache.class);
private static final String NO_CACHE_ACQUIRED = "Acquired a new access token.";
private static final String NO_CACHE_FAILED = "Failed to acquire a new access token.";
private static final String NEGATIVE_TTE =
" seconds after expiry. Retry may be attempted after " + REFRESH_DELAY_STRING + " seconds.";
private static final String POSITIVE_TTE =
" seconds before expiry. Retry may be attempted after " + REFRESH_DELAY_STRING + " seconds.";
private final AtomicReference<Sinks.One<AccessToken>> wip;
private volatile AccessToken cache;
private volatile OffsetDateTime nextTokenRefresh = OffsetDateTime.now();
private final Supplier<Mono<AccessToken>> tokenSupplier;
private final Predicate<AccessToken> shouldRefresh;
/**
* Creates an instance of RefreshableTokenCredential with default scheme "Bearer".
*
* @param tokenSupplier a method to get a new token
*/
public SimpleTokenCache(Supplier<Mono<AccessToken>> tokenSupplier) {
this.wip = new AtomicReference<>();
this.tokenSupplier = tokenSupplier;
this.shouldRefresh = accessToken -> OffsetDateTime.now()
.isAfter(accessToken.getExpiresAt().minus(REFRESH_OFFSET));
}
/**
* Asynchronously get a token from either the cache or replenish the cache with a new token.
* @return a Publisher that emits an AccessToken
*/
public Mono<AccessToken> getToken() {
return Mono.defer(() -> {
try {
if (wip.compareAndSet(null, Sinks.one())) {
final Sinks.One<AccessToken> sinksOne = wip.get();
OffsetDateTime now = OffsetDateTime.now();
Mono<AccessToken> tokenRefresh;
Mono<AccessToken> fallback;
if (cache != null && !shouldRefresh.test(cache)) {
tokenRefresh = Mono.empty();
fallback = Mono.just(cache);
} else if (cache == null || cache.isExpired()) {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.defer(tokenSupplier)
.delaySubscription(Duration.between(now, nextTokenRefresh));
}
fallback = Mono.empty();
} else {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.empty();
}
fallback = Mono.just(cache);
}
return tokenRefresh
.materialize()
.flatMap(signal -> {
AccessToken accessToken = signal.get();
Throwable error = signal.getThrowable();
if (signal.isOnNext() && accessToken != null) {
LOGGER.log(LogLevel.INFORMATIONAL,
() -> refreshLog(cache, now, "Acquired a new access token", true));
cache = accessToken;
sinksOne.tryEmitValue(accessToken);
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return Mono.just(accessToken);
} else if (signal.isOnError() && error != null) {
LOGGER.log(LogLevel.ERROR,
() -> refreshLog(cache, now, "Failed to acquire a new access token", false));
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return fallback.switchIfEmpty(Mono.error(() -> error));
} else {
sinksOne.tryEmitEmpty();
return fallback;
}
})
.doOnError(sinksOne::tryEmitError)
.doFinally(ignored -> wip.set(null));
} else if (cache != null && !cache.isExpired()) {
return Mono.just(cache);
} else {
Sinks.One<AccessToken> sinksOne = wip.get();
if (sinksOne == null) {
return Mono.just(cache);
} else {
return sinksOne.asMono().switchIfEmpty(Mono.fromSupplier(() -> cache));
}
}
} catch (Exception t) {
return Mono.error(t);
}
});
}
} | class SimpleTokenCache {
private static final Duration REFRESH_DELAY = Duration.ofSeconds(30);
private static final String REFRESH_DELAY_STRING = String.valueOf(REFRESH_DELAY.getSeconds());
private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
private static final ClientLogger LOGGER = new ClientLogger(SimpleTokenCache.class);
private static final String NO_CACHE_ACQUIRED = "Acquired a new access token.";
private static final String NO_CACHE_FAILED = "Failed to acquire a new access token.";
private static final String NEGATIVE_TTE = " seconds after expiry. Retry may be attempted after "
+ REFRESH_DELAY_STRING + " seconds.";
private static final String POSITIVE_TTE = " seconds before expiry. Retry may be attempted after "
+ REFRESH_DELAY_STRING + " seconds. The token currently cached will be used.";
private final AtomicReference<Sinks.One<AccessToken>> wip;
private volatile AccessToken cache;
private volatile OffsetDateTime nextTokenRefresh = OffsetDateTime.now();
private final Supplier<Mono<AccessToken>> tokenSupplier;
private final Predicate<AccessToken> shouldRefresh;
/**
* Creates an instance of RefreshableTokenCredential with default scheme "Bearer".
*
* @param tokenSupplier a method to get a new token
*/
public SimpleTokenCache(Supplier<Mono<AccessToken>> tokenSupplier) {
this.wip = new AtomicReference<>();
this.tokenSupplier = tokenSupplier;
this.shouldRefresh = accessToken -> OffsetDateTime.now()
.isAfter(accessToken.getExpiresAt().minus(REFRESH_OFFSET));
}
/**
* Asynchronously get a token from either the cache or replenish the cache with a new token.
* @return a Publisher that emits an AccessToken
*/
public Mono<AccessToken> getToken() {
return Mono.defer(() -> {
try {
if (wip.compareAndSet(null, Sinks.one())) {
final Sinks.One<AccessToken> sinksOne = wip.get();
OffsetDateTime now = OffsetDateTime.now();
Mono<AccessToken> tokenRefresh;
Mono<AccessToken> fallback;
if (cache != null && !shouldRefresh.test(cache)) {
tokenRefresh = Mono.empty();
fallback = Mono.just(cache);
} else if (cache == null || cache.isExpired()) {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.defer(tokenSupplier)
.delaySubscription(Duration.between(now, nextTokenRefresh));
}
fallback = Mono.empty();
} else {
if (now.isAfter(nextTokenRefresh)) {
tokenRefresh = Mono.defer(tokenSupplier);
} else {
tokenRefresh = Mono.empty();
}
fallback = Mono.just(cache);
}
return tokenRefresh
.materialize()
.flatMap(signal -> {
AccessToken accessToken = signal.get();
Throwable error = signal.getThrowable();
if (signal.isOnNext() && accessToken != null) {
LOGGER.log(LogLevel.INFORMATIONAL,
() -> refreshLog(cache, now, "Acquired a new access token", true));
cache = accessToken;
sinksOne.tryEmitValue(accessToken);
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return Mono.just(accessToken);
} else if (signal.isOnError() && error != null) {
LOGGER.log(LogLevel.ERROR,
() -> refreshLog(cache, now, "Failed to acquire a new access token", false));
nextTokenRefresh = OffsetDateTime.now().plus(REFRESH_DELAY);
return fallback.switchIfEmpty(Mono.error(() -> error));
} else {
sinksOne.tryEmitEmpty();
return fallback;
}
})
.doOnError(sinksOne::tryEmitError)
.doFinally(ignored -> wip.set(null));
} else if (cache != null && !cache.isExpired()) {
return Mono.just(cache);
} else {
Sinks.One<AccessToken> sinksOne = wip.get();
if (sinksOne == null) {
return Mono.just(cache);
} else {
return sinksOne.asMono().switchIfEmpty(Mono.fromSupplier(() -> cache));
}
}
} catch (Exception t) {
return Mono.error(t);
}
});
}
Sinks.One<AccessToken> getWipValue() {
return wip.get();
}
} |
We've generally used either 4KB or 8KB as the buffer sizes as it hits a nice point between memory and CPU usage, unless this certificate is always known to be < 1KB then I'll revert this. | private byte[] getCertificateBytes() throws IOException {
if (certificatePath != null) {
return Files.readAllBytes(Paths.get(certificatePath));
} else if (certificate != null) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read = certificate.read(buffer, 0, buffer.length);
while (read != -1) {
outputStream.write(buffer, 0, read);
read = certificate.read(buffer, 0, buffer.length);
}
return outputStream.toByteArray();
} else {
return new byte[0];
}
} | byte[] buffer = new byte[4096]; | private byte[] getCertificateBytes() throws IOException {
if (certificatePath != null) {
return Files.readAllBytes(Paths.get(certificatePath));
} else if (certificate != null) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read = certificate.read(buffer, 0, buffer.length);
while (read != -1) {
outputStream.write(buffer, 0, read);
read = certificate.read(buffer, 0, buffer.length);
}
return outputStream.toByteArray();
} else {
return new byte[0];
}
} | class IdentityClientBase {
static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
static final String WINDOWS_STARTER = "cmd.exe";
static final String LINUX_MAC_STARTER = "/bin/sh";
static final String WINDOWS_SWITCHER = "/c";
static final String LINUX_MAC_SWITCHER = "-c";
static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found");
static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe";
static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe";
static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh";
static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
static final String MSI_ENDPOINT_VERSION = "2017-09-01";
static final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = "2019-11-01";
static final String ADFS_TENANT = "adfs";
static final String HTTP_LOCALHOST = "http:
static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);
static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)");
static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$");
private static final String AZURE_IDENTITY_PROPERTIES = "azure-identity.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final Map<String, String> properties;
final IdentityClientOptions options;
final String tenantId;
final String clientId;
final String resourceId;
final String clientSecret;
final String clientAssertionFilePath;
final InputStream certificate;
final String certificatePath;
final Supplier<String> clientAssertionSupplier;
final String certificatePassword;
HttpPipelineAdapter httpPipelineAdapter;
String userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;
/**
* Creates an IdentityClient with the given options.
*
* @param tenantId the tenant ID of the application.
* @param clientId the client ID of the application.
* @param clientSecret the client secret of the application.
* @param resourceId the resource ID of the application
* @param certificatePath the path to the PKCS12 or PEM certificate of the application.
* @param certificate the PKCS12 or PEM certificate of the application.
* @param certificatePassword the password protecting the PFX certificate.
* @param isSharedTokenCacheCredential Indicate whether the credential is
* {@link com.azure.identity.SharedTokenCacheCredential} or not.
* @param clientAssertionTimeout the timeout to use for the client assertion.
* @param options the options configuring the client.
*/
IdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,
String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
Duration clientAssertionTimeout, IdentityClientOptions options) {
if (tenantId == null) {
tenantId = IdentityUtil.DEFAULT_TENANT;
options.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));
}
if (options == null) {
options = new IdentityClientOptions();
}
this.tenantId = tenantId;
this.clientId = clientId;
this.resourceId = resourceId;
this.clientSecret = clientSecret;
this.clientAssertionFilePath = clientAssertionFilePath;
this.certificatePath = certificatePath;
this.certificate = certificate;
this.certificatePassword = certificatePassword;
this.clientAssertionSupplier = clientAssertionSupplier;
this.options = options;
properties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);
}
ConfidentialClientApplication getConfidentialClient() {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
try (InputStream pfxCertificateStream = getCertificateInputStream()) {
credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,
certificatePassword);
}
}
} catch (IOException | GeneralSecurityException e) {
throw LOGGER.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else if (clientAssertionSupplier != null) {
credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());
} else {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path."
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
applicationBuilder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
if (options.getRegionalAuthority() != null) {
if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {
applicationBuilder.autoDetectRegion(true);
} else {
applicationBuilder.azureRegion(options.getRegionalAuthority().toString());
}
}
ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return confidentialClientApplication;
}
PublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
PublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);
try {
builder = builder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
builder.httpClient(httpPipelineAdapter);
} else {
builder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
builder.executorService(options.getExecutorService());
}
if (!options.isCp1Disabled()) {
Set<String> set = new HashSet<>(1);
set.add("CP1");
builder.clientCapabilities(set);
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
builder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
PublicClientApplication publicClientApplication = builder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return publicClientApplication;
}
ConfidentialClientApplication getManagedIdentityConfidentialClient() {
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("")
+ "/" + tenantId;
IClientCredential credential = ClientCredentialFactory
.createFromSecret(clientSecret != null ? clientSecret : "dummy-secret");
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId == null ? "SYSTEM-ASSIGNED-MANAGED-IDENTITY"
: clientId, credential);
applicationBuilder.validateAuthority(false);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
if (options.getManagedIdentityType() == null) {
throw LOGGER.logExceptionAsError(
new CredentialUnavailableException("Managed Identity type not configured, authentication not available."));
}
applicationBuilder.appTokenProvider(appTokenProviderParameters -> {
TokenRequestContext trc = new TokenRequestContext()
.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
.setClaims(appTokenProviderParameters.claims)
.setTenantId(appTokenProviderParameters.tenantId);
Mono<AccessToken> accessTokenAsync = getTokenFromTargetManagedIdentity(trc);
return accessTokenAsync.map(accessToken -> {
TokenProviderResult result = new TokenProviderResult();
result.setAccessToken(accessToken.getToken());
result.setTenantId(trc.getTenantId());
result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
return result;
}).toFuture();
});
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
return applicationBuilder.build();
}
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) {
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
DeviceCodeFlowParameters.builder(
new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
}
return parametersBuilder;
}
OnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {
return OnBehalfOfParameters
.builder(new HashSet<>(request.getScopes()), options.getUserAssertion())
.tenant(IdentityUtil.resolveTenantId(tenantId, request, options))
.build();
}
InteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {
InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
InteractiveRequestParameters.builder(redirectUri)
.scopes(new HashSet<>(request.getScopes()))
.prompt(Prompt.SELECT_ACCOUNT)
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
builder.claims(customClaimRequest);
}
if (loginHint != null) {
builder.loginHint(loginHint);
}
return builder;
}
UserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {
UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()),
username, password.toCharArray());
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
userNamePasswordParametersBuilder.claims(customClaimRequest);
}
userNamePasswordParametersBuilder.tenant(
IdentityUtil.resolveTenantId(tenantId, request, options));
return userNamePasswordParametersBuilder;
}
AccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {
AccessToken token;
try {
String starter;
String switcher;
if (isWindowsPlatform()) {
starter = WINDOWS_STARTER;
switcher = WINDOWS_SWITCHER;
} else {
starter = LINUX_MAC_STARTER;
switcher = LINUX_MAC_SWITCHER;
}
ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());
String workingDirectory = getSafeWorkingDirectory();
if (workingDirectory != null) {
builder.directory(new File(workingDirectory));
} else {
throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
+ " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting "
+ " guidelines here at https:
}
builder.redirectErrorStream(true);
Process process = builder.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),
StandardCharsets.UTF_8))) {
String line;
while (true) {
line = reader.readLine();
if (line == null) {
break;
}
if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable. Azure CLI not installed."
+ "To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
output.append(line);
}
}
String processOutput = output.toString();
process.waitFor(10, TimeUnit.SECONDS);
if (process.exitValue() != 0) {
if (processOutput.length() > 0) {
String redactedOutput = redactInfo(processOutput);
if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable."
+ " Please run 'az login' to set up account. To further mitigate this"
+ " issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
} else {
throw LOGGER.logExceptionAsError(
new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
}
}
LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the"
+ " response into an Access Token.");
Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
SerializerEncoding.JSON);
String accessToken = objectMap.get("accessToken");
String time = objectMap.get("expiresOn");
String timeToSecond = time.substring(0, time.indexOf("."));
String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
.atZone(ZoneId.systemDefault())
.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
token = new AccessToken(accessToken, expiresOn);
} catch (IOException | InterruptedException e) {
throw LOGGER.logExceptionAsError(new IllegalStateException(e));
}
return token;
}
String getSafeWorkingDirectory() {
if (isWindowsPlatform()) {
if (CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)) {
return null;
}
return DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32";
} else {
return DEFAULT_MAC_LINUX_PATH;
}
}
boolean isWindowsPlatform() {
return System.getProperty("os.name").contains("Windows");
}
String redactInfo(String input) {
return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****");
}
abstract Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);
HttpPipeline setupPipeline(HttpClient httpClient) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
HttpLogOptions httpLogOptions = new HttpLogOptions();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
Configuration buildConfiguration = Configuration.getGlobalConfiguration().clone();
userAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);
policies.add(new UserAgentPolicy(userAgent));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RetryPolicy());
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder().httpClient(httpClient)
.policies(policies.toArray(new HttpPipelinePolicy[0])).build();
}
void initializeHttpPipelineAdapter() {
HttpPipeline httpPipeline = options.getHttpPipeline();
if (httpPipeline != null) {
httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options);
} else {
HttpClient httpClient = options.getHttpClient();
if (httpClient != null) {
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options);
} else if (options.getProxyOptions() == null) {
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);
}
}
}
private InputStream getCertificateInputStream() throws IOException {
if (certificatePath != null) {
return new BufferedInputStream(new FileInputStream(certificatePath));
} else {
return certificate;
}
}
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
switch (options.getType()) {
case SOCKS4:
case SOCKS5:
return new Proxy(Proxy.Type.SOCKS, options.getAddress());
case HTTP:
default:
return new Proxy(Proxy.Type.HTTP, options.getAddress());
}
}
} | class IdentityClientBase {
static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
static final String WINDOWS_STARTER = "cmd.exe";
static final String LINUX_MAC_STARTER = "/bin/sh";
static final String WINDOWS_SWITCHER = "/c";
static final String LINUX_MAC_SWITCHER = "-c";
static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found");
static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe";
static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe";
static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh";
static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
static final String MSI_ENDPOINT_VERSION = "2017-09-01";
static final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = "2019-11-01";
static final String ADFS_TENANT = "adfs";
static final String HTTP_LOCALHOST = "http:
static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);
static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)");
static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$");
private static final String AZURE_IDENTITY_PROPERTIES = "azure-identity.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final Map<String, String> properties;
final IdentityClientOptions options;
final String tenantId;
final String clientId;
final String resourceId;
final String clientSecret;
final String clientAssertionFilePath;
final InputStream certificate;
final String certificatePath;
final Supplier<String> clientAssertionSupplier;
final String certificatePassword;
HttpPipelineAdapter httpPipelineAdapter;
String userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;
/**
* Creates an IdentityClient with the given options.
*
* @param tenantId the tenant ID of the application.
* @param clientId the client ID of the application.
* @param clientSecret the client secret of the application.
* @param resourceId the resource ID of the application
* @param certificatePath the path to the PKCS12 or PEM certificate of the application.
* @param certificate the PKCS12 or PEM certificate of the application.
* @param certificatePassword the password protecting the PFX certificate.
* @param isSharedTokenCacheCredential Indicate whether the credential is
* {@link com.azure.identity.SharedTokenCacheCredential} or not.
* @param clientAssertionTimeout the timeout to use for the client assertion.
* @param options the options configuring the client.
*/
IdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,
String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
Duration clientAssertionTimeout, IdentityClientOptions options) {
if (tenantId == null) {
tenantId = IdentityUtil.DEFAULT_TENANT;
options.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));
}
if (options == null) {
options = new IdentityClientOptions();
}
this.tenantId = tenantId;
this.clientId = clientId;
this.resourceId = resourceId;
this.clientSecret = clientSecret;
this.clientAssertionFilePath = clientAssertionFilePath;
this.certificatePath = certificatePath;
this.certificate = certificate;
this.certificatePassword = certificatePassword;
this.clientAssertionSupplier = clientAssertionSupplier;
this.options = options;
properties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);
}
ConfidentialClientApplication getConfidentialClient() {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
try (InputStream pfxCertificateStream = getCertificateInputStream()) {
credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,
certificatePassword);
}
}
} catch (IOException | GeneralSecurityException e) {
throw LOGGER.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else if (clientAssertionSupplier != null) {
credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());
} else {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path."
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
applicationBuilder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
if (options.getRegionalAuthority() != null) {
if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {
applicationBuilder.autoDetectRegion(true);
} else {
applicationBuilder.azureRegion(options.getRegionalAuthority().toString());
}
}
ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return confidentialClientApplication;
}
PublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
PublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);
try {
builder = builder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
builder.httpClient(httpPipelineAdapter);
} else {
builder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
builder.executorService(options.getExecutorService());
}
if (!options.isCp1Disabled()) {
Set<String> set = new HashSet<>(1);
set.add("CP1");
builder.clientCapabilities(set);
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
builder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
PublicClientApplication publicClientApplication = builder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return publicClientApplication;
}
ConfidentialClientApplication getManagedIdentityConfidentialClient() {
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("")
+ "/" + tenantId;
IClientCredential credential = ClientCredentialFactory
.createFromSecret(clientSecret != null ? clientSecret : "dummy-secret");
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId == null ? "SYSTEM-ASSIGNED-MANAGED-IDENTITY"
: clientId, credential);
applicationBuilder.validateAuthority(false);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
if (options.getManagedIdentityType() == null) {
throw LOGGER.logExceptionAsError(
new CredentialUnavailableException("Managed Identity type not configured, authentication not available."));
}
applicationBuilder.appTokenProvider(appTokenProviderParameters -> {
TokenRequestContext trc = new TokenRequestContext()
.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
.setClaims(appTokenProviderParameters.claims)
.setTenantId(appTokenProviderParameters.tenantId);
Mono<AccessToken> accessTokenAsync = getTokenFromTargetManagedIdentity(trc);
return accessTokenAsync.map(accessToken -> {
TokenProviderResult result = new TokenProviderResult();
result.setAccessToken(accessToken.getToken());
result.setTenantId(trc.getTenantId());
result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
return result;
}).toFuture();
});
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
return applicationBuilder.build();
}
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) {
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
DeviceCodeFlowParameters.builder(
new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
}
return parametersBuilder;
}
OnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {
return OnBehalfOfParameters
.builder(new HashSet<>(request.getScopes()), options.getUserAssertion())
.tenant(IdentityUtil.resolveTenantId(tenantId, request, options))
.build();
}
InteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {
InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
InteractiveRequestParameters.builder(redirectUri)
.scopes(new HashSet<>(request.getScopes()))
.prompt(Prompt.SELECT_ACCOUNT)
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
builder.claims(customClaimRequest);
}
if (loginHint != null) {
builder.loginHint(loginHint);
}
return builder;
}
UserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {
UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()),
username, password.toCharArray());
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
userNamePasswordParametersBuilder.claims(customClaimRequest);
}
userNamePasswordParametersBuilder.tenant(
IdentityUtil.resolveTenantId(tenantId, request, options));
return userNamePasswordParametersBuilder;
}
AccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {
AccessToken token;
try {
String starter;
String switcher;
if (isWindowsPlatform()) {
starter = WINDOWS_STARTER;
switcher = WINDOWS_SWITCHER;
} else {
starter = LINUX_MAC_STARTER;
switcher = LINUX_MAC_SWITCHER;
}
ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());
String workingDirectory = getSafeWorkingDirectory();
if (workingDirectory != null) {
builder.directory(new File(workingDirectory));
} else {
throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
+ " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting "
+ " guidelines here at https:
}
builder.redirectErrorStream(true);
Process process = builder.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),
StandardCharsets.UTF_8))) {
String line;
while (true) {
line = reader.readLine();
if (line == null) {
break;
}
if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable. Azure CLI not installed."
+ "To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
output.append(line);
}
}
String processOutput = output.toString();
process.waitFor(10, TimeUnit.SECONDS);
if (process.exitValue() != 0) {
if (processOutput.length() > 0) {
String redactedOutput = redactInfo(processOutput);
if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable."
+ " Please run 'az login' to set up account. To further mitigate this"
+ " issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
} else {
throw LOGGER.logExceptionAsError(
new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
}
}
LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the"
+ " response into an Access Token.");
Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
SerializerEncoding.JSON);
String accessToken = objectMap.get("accessToken");
String time = objectMap.get("expiresOn");
String timeToSecond = time.substring(0, time.indexOf("."));
String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
.atZone(ZoneId.systemDefault())
.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
token = new AccessToken(accessToken, expiresOn);
} catch (IOException | InterruptedException e) {
throw LOGGER.logExceptionAsError(new IllegalStateException(e));
}
return token;
}
String getSafeWorkingDirectory() {
if (isWindowsPlatform()) {
String windowsSystemRoot = System.getenv("SystemRoot");
if (CoreUtils.isNullOrEmpty(windowsSystemRoot)) {
return null;
}
return windowsSystemRoot + "\\system32";
} else {
return DEFAULT_MAC_LINUX_PATH;
}
}
boolean isWindowsPlatform() {
return System.getProperty("os.name").contains("Windows");
}
String redactInfo(String input) {
return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****");
}
abstract Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);
HttpPipeline setupPipeline(HttpClient httpClient) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
HttpLogOptions httpLogOptions = new HttpLogOptions();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
Configuration buildConfiguration = Configuration.getGlobalConfiguration().clone();
userAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);
policies.add(new UserAgentPolicy(userAgent));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RetryPolicy());
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder().httpClient(httpClient)
.policies(policies.toArray(new HttpPipelinePolicy[0])).build();
}
void initializeHttpPipelineAdapter() {
HttpPipeline httpPipeline = options.getHttpPipeline();
if (httpPipeline != null) {
httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options);
} else {
HttpClient httpClient = options.getHttpClient();
if (httpClient != null) {
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options);
} else if (options.getProxyOptions() == null) {
httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);
}
}
}
private InputStream getCertificateInputStream() throws IOException {
if (certificatePath != null) {
return new BufferedInputStream(new FileInputStream(certificatePath));
} else {
return certificate;
}
}
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
switch (options.getType()) {
case SOCKS4:
case SOCKS5:
return new Proxy(Proxy.Type.SOCKS, options.getAddress());
case HTTP:
default:
return new Proxy(Proxy.Type.HTTP, options.getAddress());
}
}
} |
I don't think there is actually a maximum in the certificate specs. Particularly since a PFX can bundle lots of things. I was just curious. | private byte[] getCertificateBytes() throws IOException {
if (certificatePath != null) {
return Files.readAllBytes(Paths.get(certificatePath));
} else if (certificate != null) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read = certificate.read(buffer, 0, buffer.length);
while (read != -1) {
outputStream.write(buffer, 0, read);
read = certificate.read(buffer, 0, buffer.length);
}
return outputStream.toByteArray();
} else {
return new byte[0];
}
} | byte[] buffer = new byte[4096]; | private byte[] getCertificateBytes() throws IOException {
if (certificatePath != null) {
return Files.readAllBytes(Paths.get(certificatePath));
} else if (certificate != null) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read = certificate.read(buffer, 0, buffer.length);
while (read != -1) {
outputStream.write(buffer, 0, read);
read = certificate.read(buffer, 0, buffer.length);
}
return outputStream.toByteArray();
} else {
return new byte[0];
}
} | class IdentityClientBase {
static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
static final String WINDOWS_STARTER = "cmd.exe";
static final String LINUX_MAC_STARTER = "/bin/sh";
static final String WINDOWS_SWITCHER = "/c";
static final String LINUX_MAC_SWITCHER = "-c";
static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found");
static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot");
static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe";
static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe";
static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh";
static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
static final String MSI_ENDPOINT_VERSION = "2017-09-01";
static final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = "2019-11-01";
static final String ADFS_TENANT = "adfs";
static final String HTTP_LOCALHOST = "http:
static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);
static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)");
static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$");
private static final String AZURE_IDENTITY_PROPERTIES = "azure-identity.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final Map<String, String> properties;
final IdentityClientOptions options;
final String tenantId;
final String clientId;
final String resourceId;
final String clientSecret;
final String clientAssertionFilePath;
final InputStream certificate;
final String certificatePath;
final Supplier<String> clientAssertionSupplier;
final String certificatePassword;
HttpPipelineAdapter httpPipelineAdapter;
String userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;
/**
* Creates an IdentityClient with the given options.
*
* @param tenantId the tenant ID of the application.
* @param clientId the client ID of the application.
* @param clientSecret the client secret of the application.
* @param resourceId the resource ID of the application
* @param certificatePath the path to the PKCS12 or PEM certificate of the application.
* @param certificate the PKCS12 or PEM certificate of the application.
* @param certificatePassword the password protecting the PFX certificate.
* @param isSharedTokenCacheCredential Indicate whether the credential is
* {@link com.azure.identity.SharedTokenCacheCredential} or not.
* @param clientAssertionTimeout the timeout to use for the client assertion.
* @param options the options configuring the client.
*/
IdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,
    String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
    InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
    Duration clientAssertionTimeout, IdentityClientOptions options) {
    // Default the options BEFORE they are used: the tenant fallback below mutates
    // 'options', and the original ordering dereferenced it ahead of the null check,
    // throwing NullPointerException when both tenantId and options were null.
    if (options == null) {
        options = new IdentityClientOptions();
    }
    if (tenantId == null) {
        // No tenant supplied: fall back to the default tenant and allow token
        // acquisition for any tenant.
        tenantId = IdentityUtil.DEFAULT_TENANT;
        options.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));
    }
    this.tenantId = tenantId;
    this.clientId = clientId;
    this.resourceId = resourceId;
    this.clientSecret = clientSecret;
    this.clientAssertionFilePath = clientAssertionFilePath;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    this.clientAssertionSupplier = clientAssertionSupplier;
    this.options = options;
    // SDK name/version properties, used later to build the User-Agent header.
    properties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);
}
// Builds the MSAL ConfidentialClientApplication for service-principal authentication.
// Exactly one credential source is chosen, in priority order: client secret,
// certificate (PEM when no password is set, PFX/PKCS12 otherwise), then a
// client-assertion supplier; having none is a configuration error.
ConfidentialClientApplication getConfidentialClient() {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
// Authority is "<host>/<tenant>" with trailing slashes stripped from the host.
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
// No password: treat the bytes as PEM holding the private key and one
// or more certificates (a chain when more than one is present).
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
// Password present: the input is a password-protected PFX store.
try (InputStream pfxCertificateStream = getCertificateInputStream()) {
credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,
certificatePassword);
}
}
} catch (IOException | GeneralSecurityException e) {
throw LOGGER.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else if (clientAssertionSupplier != null) {
credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());
} else {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path."
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
// Route MSAL's HTTP traffic through the azure-core pipeline when one can be
// built; otherwise fall back to MSAL's own proxy handling.
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
applicationBuilder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
// Throwable is caught deliberately: persistent-cache setup can fail with
// linkage errors on unsupported platforms, not just exceptions.
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
if (options.getRegionalAuthority() != null) {
// AUTO_DISCOVER_REGION delegates region detection to MSAL; any other value
// pins the regional endpoint explicitly.
if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {
applicationBuilder.autoDetectRegion(true);
} else {
applicationBuilder.azureRegion(options.getRegionalAuthority().toString());
}
}
ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();
// Register the persistent cache only after the application built successfully.
if (tokenCache != null) {
tokenCache.registerCache();
}
return confidentialClientApplication;
}
// Builds the MSAL PublicClientApplication used for user (interactive, device-code,
// username/password) authentication flows.
// NOTE(review): the sharedTokenCacheCredential parameter is never referenced in
// this body — confirm whether it is still needed by callers.
PublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
// Authority is "<host>/<tenant>" with trailing slashes stripped from the host.
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
PublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);
try {
builder = builder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
// Prefer the azure-core pipeline for MSAL's HTTP traffic; otherwise let MSAL
// handle the configured proxy itself.
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
builder.httpClient(httpPipelineAdapter);
} else {
builder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
builder.executorService(options.getExecutorService());
}
if (!options.isCp1Disabled()) {
// Advertise the CP1 client capability — presumably Continuous Access
// Evaluation support; confirm against the service documentation.
Set<String> set = new HashSet<>(1);
set.add("CP1");
builder.clientCapabilities(set);
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
builder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
// Throwable: cache setup can fail with linkage errors on unsupported platforms.
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
PublicClientApplication publicClientApplication = builder.build();
// Register the persistent cache only after the application built successfully.
if (tokenCache != null) {
tokenCache.registerCache();
}
return publicClientApplication;
}
// Builds a ConfidentialClientApplication whose tokens come from the managed
// identity endpoint via an app token provider, not from the supplied credential.
ConfidentialClientApplication getManagedIdentityConfidentialClient() {
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("")
+ "/" + tenantId;
// MSAL's builder requires a credential even though the appTokenProvider below
// supplies the actual tokens, hence the placeholder secret.
IClientCredential credential = ClientCredentialFactory
.createFromSecret(clientSecret != null ? clientSecret : "dummy-secret");
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId == null ? "SYSTEM-ASSIGNED-MANAGED-IDENTITY"
: clientId, credential);
applicationBuilder.validateAuthority(false);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
if (options.getManagedIdentityType() == null) {
throw LOGGER.logExceptionAsError(
new CredentialUnavailableException("Managed Identity type not configured, authentication not available."));
}
// Bridge MSAL's token-provider callback to the async managed-identity source
// implemented by the concrete subclass.
applicationBuilder.appTokenProvider(appTokenProviderParameters -> {
TokenRequestContext trc = new TokenRequestContext()
.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
.setClaims(appTokenProviderParameters.claims)
.setTenantId(appTokenProviderParameters.tenantId);
Mono<AccessToken> accessTokenAsync = getTokenFromTargetManagedIdentity(trc);
return accessTokenAsync.map(accessToken -> {
TokenProviderResult result = new TokenProviderResult();
result.setAccessToken(accessToken.getToken());
result.setTenantId(trc.getTenantId());
result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
return result;
}).toFuture();
});
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
return applicationBuilder.build();
}
// Builds the MSAL device-code flow parameters, adapting MSAL's device-code
// callback into the SDK's DeviceCodeInfo consumer.
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) {
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
DeviceCodeFlowParameters.builder(
new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
// Forward caller-supplied claims (e.g. claims challenges) to MSAL.
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
}
return parametersBuilder;
}
// Builds the MSAL on-behalf-of flow parameters for the given token request,
// using the user assertion configured on the client options.
OnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {
    Set<String> requestedScopes = new HashSet<>(request.getScopes());
    String resolvedTenant = IdentityUtil.resolveTenantId(tenantId, request, options);
    return OnBehalfOfParameters.builder(requestedScopes, options.getUserAssertion())
        .tenant(resolvedTenant)
        .build();
}
// Builds the MSAL interactive-browser flow parameters: always prompts for
// account selection, optionally seeding a login hint and claims.
InteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {
InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
InteractiveRequestParameters.builder(redirectUri)
.scopes(new HashSet<>(request.getScopes()))
.prompt(Prompt.SELECT_ACCOUNT)
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
// Forward caller-supplied claims (e.g. claims challenges) to MSAL.
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
builder.claims(customClaimRequest);
}
if (loginHint != null) {
builder.loginHint(loginHint);
}
return builder;
}
// Builds the MSAL username/password (resource-owner password) flow parameters
// for the given token request.
UserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {
    UserNamePasswordParameters.UserNamePasswordParametersBuilder builder =
        UserNamePasswordParameters.builder(
            new HashSet<>(request.getScopes()), username, password.toCharArray());
    // Forward caller-supplied claims (e.g. claims challenges) to MSAL.
    if (request.getClaims() != null) {
        builder.claims(CustomClaimRequest.formatAsClaimsRequest(request.getClaims()));
    }
    builder.tenant(IdentityUtil.resolveTenantId(tenantId, request, options));
    return builder;
}
// Runs the given 'az' command through the platform shell, parses the JSON token
// response, and returns it as an AccessToken. Throws CredentialUnavailableException
// when the CLI is missing or the user is not logged in.
AccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {
AccessToken token;
try {
String starter;
String switcher;
// Pick the shell and its "run a command string" switch per platform.
if (isWindowsPlatform()) {
starter = WINDOWS_STARTER;
switcher = WINDOWS_SWITCHER;
} else {
starter = LINUX_MAC_STARTER;
switcher = LINUX_MAC_SWITCHER;
}
ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());
// Launch from a trusted directory so a malicious 'az' in the current
// directory cannot be picked up.
String workingDirectory = getSafeWorkingDirectory();
if (workingDirectory != null) {
builder.directory(new File(workingDirectory));
} else {
throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
+ " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting "
+ " guidelines here at https:
}
// Merge stderr into stdout so error text is visible to the scan below.
builder.redirectErrorStream(true);
Process process = builder.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),
StandardCharsets.UTF_8))) {
String line;
while (true) {
line = reader.readLine();
if (line == null) {
break;
}
// "command not found" style output means the CLI is not installed.
if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable. Azure CLI not installed."
+ "To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
output.append(line);
}
}
String processOutput = output.toString();
// NOTE(review): waitFor's boolean result is ignored; if the process is still
// running after 10s, exitValue() below throws IllegalStateException — confirm
// this is the intended timeout behavior.
process.waitFor(10, TimeUnit.SECONDS);
if (process.exitValue() != 0) {
if (processOutput.length() > 0) {
// Mask any token material before surfacing CLI output in exceptions.
String redactedOutput = redactInfo(processOutput);
if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable."
+ " Please run 'az login' to set up account. To further mitigate this"
+ " issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
} else {
throw LOGGER.logExceptionAsError(
new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
}
}
LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the"
+ " response into an Access Token.");
Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
SerializerEncoding.JSON);
String accessToken = objectMap.get("accessToken");
// "expiresOn" is "yyyy-MM-dd HH:mm:ss.ffffff" in the CLI's local time: strip
// fractional seconds, join date and time with 'T', parse, then convert to UTC.
// NOTE(review): assumes the CLI's zone equals the JVM's system default and
// that a '.' is always present — confirm against the CLI output format.
String time = objectMap.get("expiresOn");
String timeToSecond = time.substring(0, time.indexOf("."));
String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
.atZone(ZoneId.systemDefault())
.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
token = new AccessToken(accessToken, expiresOn);
} catch (IOException | InterruptedException e) {
throw LOGGER.logExceptionAsError(new IllegalStateException(e));
}
return token;
}
// Returns a directory that is safe to launch the CLI process from, or null when
// none can be determined. Windows: %SystemRoot%\system32; macOS/Linux: /bin/.
String getSafeWorkingDirectory() {
    if (isWindowsPlatform()) {
        // Read the environment at call time instead of relying on the value the
        // DEFAULT_WINDOWS_SYSTEM_ROOT constant captured at class-load time, so a
        // SystemRoot set after the class loaded is honored (and the method stays
        // testable without reloading the class).
        String systemRoot = System.getenv("SystemRoot");
        if (CoreUtils.isNullOrEmpty(systemRoot)) {
            return null;
        }
        return systemRoot + "\\system32";
    }
    return DEFAULT_MAC_LINUX_PATH;
}
// True when the JVM reports a Windows operating system via 'os.name'.
boolean isWindowsPlatform() {
    final String osName = System.getProperty("os.name");
    return osName.contains("Windows");
}
// Masks any serialized access-token values in the given text with "****" so the
// text can be safely logged or included in exception messages.
String redactInfo(String input) {
return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****");
}
abstract Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);
// Builds an azure-core HttpPipeline around the given client with the standard
// policy set: user agent, retry (with the default before/after provider hooks),
// and HTTP logging. Also refreshes the cached userAgent field as a side effect.
HttpPipeline setupPipeline(HttpClient httpClient) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
HttpLogOptions httpLogOptions = new HttpLogOptions();
// SDK name/version come from azure-identity.properties loaded in the constructor.
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
Configuration buildConfiguration = Configuration.getGlobalConfiguration().clone();
userAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);
policies.add(new UserAgentPolicy(userAgent));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RetryPolicy());
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder().httpClient(httpClient)
.policies(policies.toArray(new HttpPipelinePolicy[0])).build();
}
// Wires up the MSAL HTTP client adapter from, in order of precedence: a fully
// configured pipeline, a configured HttpClient, or a default client. When only
// proxy options are set, the adapter is left untouched so MSAL applies the
// proxy itself.
void initializeHttpPipelineAdapter() {
    HttpPipeline configuredPipeline = options.getHttpPipeline();
    if (configuredPipeline != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(configuredPipeline, options);
        return;
    }
    HttpClient configuredClient = options.getHttpClient();
    if (configuredClient != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(configuredClient), options);
        return;
    }
    if (options.getProxyOptions() == null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);
    }
}
// Opens the configured certificate as a stream: a file path, when set, takes
// precedence; otherwise the in-memory stream supplied at construction is returned.
private InputStream getCertificateInputStream() throws IOException {
    if (certificatePath == null) {
        return certificate;
    }
    return new BufferedInputStream(new FileInputStream(certificatePath));
}
// Converts azure-core proxy options into the equivalent java.net.Proxy.
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    ProxyOptions.Type type = options.getType();
    // SOCKS4 and SOCKS5 both map onto java.net's single SOCKS type; every other
    // value (including HTTP) falls back to an HTTP proxy.
    if (type == ProxyOptions.Type.SOCKS4 || type == ProxyOptions.Type.SOCKS5) {
        return new Proxy(Proxy.Type.SOCKS, options.getAddress());
    }
    return new Proxy(Proxy.Type.HTTP, options.getAddress());
}
} | class IdentityClientBase {
static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter();
static final String WINDOWS_STARTER = "cmd.exe";
static final String LINUX_MAC_STARTER = "/bin/sh";
static final String WINDOWS_SWITCHER = "/c";
static final String LINUX_MAC_SWITCHER = "-c";
static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized";
static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found");
static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe";
static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe";
static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh";
static final String DEFAULT_MAC_LINUX_PATH = "/bin/";
static final Duration REFRESH_OFFSET = Duration.ofMinutes(5);
static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01";
static final String MSI_ENDPOINT_VERSION = "2017-09-01";
static final String ARC_MANAGED_IDENTITY_ENDPOINT_API_VERSION = "2019-11-01";
static final String ADFS_TENANT = "adfs";
static final String HTTP_LOCALHOST = "http:
static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview";
static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class);
static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)");
static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$");
private static final String AZURE_IDENTITY_PROPERTIES = "azure-identity.properties";
private static final String SDK_NAME = "name";
private static final String SDK_VERSION = "version";
private final Map<String, String> properties;
final IdentityClientOptions options;
final String tenantId;
final String clientId;
final String resourceId;
final String clientSecret;
final String clientAssertionFilePath;
final InputStream certificate;
final String certificatePath;
final Supplier<String> clientAssertionSupplier;
final String certificatePassword;
HttpPipelineAdapter httpPipelineAdapter;
String userAgent = UserAgentUtil.DEFAULT_USER_AGENT_HEADER;
/**
* Creates an IdentityClient with the given options.
*
* @param tenantId the tenant ID of the application.
* @param clientId the client ID of the application.
* @param clientSecret the client secret of the application.
* @param resourceId the resource ID of the application
* @param certificatePath the path to the PKCS12 or PEM certificate of the application.
* @param certificate the PKCS12 or PEM certificate of the application.
* @param certificatePassword the password protecting the PFX certificate.
* @param isSharedTokenCacheCredential Indicate whether the credential is
* {@link com.azure.identity.SharedTokenCacheCredential} or not.
* @param clientAssertionTimeout the timeout to use for the client assertion.
* @param options the options configuring the client.
*/
IdentityClientBase(String tenantId, String clientId, String clientSecret, String certificatePath,
    String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier,
    InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential,
    Duration clientAssertionTimeout, IdentityClientOptions options) {
    // Default the options BEFORE they are used: the tenant fallback below mutates
    // 'options', and the original ordering dereferenced it ahead of the null check,
    // throwing NullPointerException when both tenantId and options were null.
    if (options == null) {
        options = new IdentityClientOptions();
    }
    if (tenantId == null) {
        // No tenant supplied: fall back to the default tenant and allow token
        // acquisition for any tenant.
        tenantId = IdentityUtil.DEFAULT_TENANT;
        options.setAdditionallyAllowedTenants(Collections.singletonList(IdentityUtil.ALL_TENANTS));
    }
    this.tenantId = tenantId;
    this.clientId = clientId;
    this.resourceId = resourceId;
    this.clientSecret = clientSecret;
    this.clientAssertionFilePath = clientAssertionFilePath;
    this.certificatePath = certificatePath;
    this.certificate = certificate;
    this.certificatePassword = certificatePassword;
    this.clientAssertionSupplier = clientAssertionSupplier;
    this.options = options;
    // SDK name/version properties, used later to build the User-Agent header.
    properties = CoreUtils.getProperties(AZURE_IDENTITY_PROPERTIES);
}
ConfidentialClientApplication getConfidentialClient() {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
IClientCredential credential;
if (clientSecret != null) {
credential = ClientCredentialFactory.createFromSecret(clientSecret);
} else if (certificate != null || certificatePath != null) {
try {
if (certificatePassword == null) {
byte[] pemCertificateBytes = getCertificateBytes();
List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes);
PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes);
if (x509CertificateList.size() == 1) {
credential = ClientCredentialFactory.createFromCertificate(
privateKey, x509CertificateList.get(0));
} else {
credential = ClientCredentialFactory.createFromCertificateChain(
privateKey, x509CertificateList);
}
} else {
try (InputStream pfxCertificateStream = getCertificateInputStream()) {
credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream,
certificatePassword);
}
}
} catch (IOException | GeneralSecurityException e) {
throw LOGGER.logExceptionAsError(new RuntimeException(
"Failed to parse the certificate for the credential: " + e.getMessage(), e));
}
} else if (clientAssertionSupplier != null) {
credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get());
} else {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Must provide client secret or client certificate path."
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId, credential);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
applicationBuilder.sendX5c(options.isIncludeX5c());
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
applicationBuilder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
if (options.getRegionalAuthority() != null) {
if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) {
applicationBuilder.autoDetectRegion(true);
} else {
applicationBuilder.azureRegion(options.getRegionalAuthority().toString());
}
}
ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return confidentialClientApplication;
}
PublicClientApplication getPublicClient(boolean sharedTokenCacheCredential) {
if (clientId == null) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"A non-null value for client ID must be provided for user authentication."));
}
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/"
+ tenantId;
PublicClientApplication.Builder builder = PublicClientApplication.builder(clientId);
try {
builder = builder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
builder.httpClient(httpPipelineAdapter);
} else {
builder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
builder.executorService(options.getExecutorService());
}
if (!options.isCp1Disabled()) {
Set<String> set = new HashSet<>(1);
set.add("CP1");
builder.clientCapabilities(set);
}
TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions();
PersistentTokenCacheImpl tokenCache = null;
if (tokenCachePersistenceOptions != null) {
try {
tokenCache = new PersistentTokenCacheImpl()
.setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed())
.setName(tokenCachePersistenceOptions.getName());
builder.setTokenCacheAccessAspect(tokenCache);
} catch (Throwable t) {
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(
"Shared token cache is unavailable in this environment.", null, t));
}
}
PublicClientApplication publicClientApplication = builder.build();
if (tokenCache != null) {
tokenCache.registerCache();
}
return publicClientApplication;
}
ConfidentialClientApplication getManagedIdentityConfidentialClient() {
String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("")
+ "/" + tenantId;
IClientCredential credential = ClientCredentialFactory
.createFromSecret(clientSecret != null ? clientSecret : "dummy-secret");
ConfidentialClientApplication.Builder applicationBuilder =
ConfidentialClientApplication.builder(clientId == null ? "SYSTEM-ASSIGNED-MANAGED-IDENTITY"
: clientId, credential);
applicationBuilder.validateAuthority(false);
try {
applicationBuilder = applicationBuilder.authority(authorityUrl);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsWarning(new IllegalStateException(e));
}
if (options.getManagedIdentityType() == null) {
throw LOGGER.logExceptionAsError(
new CredentialUnavailableException("Managed Identity type not configured, authentication not available."));
}
applicationBuilder.appTokenProvider(appTokenProviderParameters -> {
TokenRequestContext trc = new TokenRequestContext()
.setScopes(new ArrayList<>(appTokenProviderParameters.scopes))
.setClaims(appTokenProviderParameters.claims)
.setTenantId(appTokenProviderParameters.tenantId);
Mono<AccessToken> accessTokenAsync = getTokenFromTargetManagedIdentity(trc);
return accessTokenAsync.map(accessToken -> {
TokenProviderResult result = new TokenProviderResult();
result.setAccessToken(accessToken.getToken());
result.setTenantId(trc.getTenantId());
result.setExpiresInSeconds(accessToken.getExpiresAt().toEpochSecond());
return result;
}).toFuture();
});
initializeHttpPipelineAdapter();
if (httpPipelineAdapter != null) {
applicationBuilder.httpClient(httpPipelineAdapter);
} else {
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions()));
}
if (options.getExecutorService() != null) {
applicationBuilder.executorService(options.getExecutorService());
}
return applicationBuilder.build();
}
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder buildDeviceCodeFlowParameters(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) {
DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder =
DeviceCodeFlowParameters.builder(
new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(
new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(),
OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message())))
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
parametersBuilder.claims(customClaimRequest);
}
return parametersBuilder;
}
OnBehalfOfParameters buildOBOFlowParameters(TokenRequestContext request) {
return OnBehalfOfParameters
.builder(new HashSet<>(request.getScopes()), options.getUserAssertion())
.tenant(IdentityUtil.resolveTenantId(tenantId, request, options))
.build();
}
InteractiveRequestParameters.InteractiveRequestParametersBuilder buildInteractiveRequestParameters(TokenRequestContext request, String loginHint, URI redirectUri) {
InteractiveRequestParameters.InteractiveRequestParametersBuilder builder =
InteractiveRequestParameters.builder(redirectUri)
.scopes(new HashSet<>(request.getScopes()))
.prompt(Prompt.SELECT_ACCOUNT)
.tenant(IdentityUtil
.resolveTenantId(tenantId, request, options));
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims());
builder.claims(customClaimRequest);
}
if (loginHint != null) {
builder.loginHint(loginHint);
}
return builder;
}
UserNamePasswordParameters.UserNamePasswordParametersBuilder buildUsernamePasswordFlowParameters(TokenRequestContext request, String username, String password) {
UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder =
UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()),
username, password.toCharArray());
if (request.getClaims() != null) {
ClaimsRequest customClaimRequest = CustomClaimRequest
.formatAsClaimsRequest(request.getClaims());
userNamePasswordParametersBuilder.claims(customClaimRequest);
}
userNamePasswordParametersBuilder.tenant(
IdentityUtil.resolveTenantId(tenantId, request, options));
return userNamePasswordParametersBuilder;
}
AccessToken getTokenFromAzureCLIAuthentication(StringBuilder azCommand) {
AccessToken token;
try {
String starter;
String switcher;
if (isWindowsPlatform()) {
starter = WINDOWS_STARTER;
switcher = WINDOWS_SWITCHER;
} else {
starter = LINUX_MAC_STARTER;
switcher = LINUX_MAC_SWITCHER;
}
ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString());
String workingDirectory = getSafeWorkingDirectory();
if (workingDirectory != null) {
builder.directory(new File(workingDirectory));
} else {
throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be"
+ " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting "
+ " guidelines here at https:
}
builder.redirectErrorStream(true);
Process process = builder.start();
StringBuilder output = new StringBuilder();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(),
StandardCharsets.UTF_8))) {
String line;
while (true) {
line = reader.readLine();
if (line == null) {
break;
}
if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE)
|| LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable. Azure CLI not installed."
+ "To mitigate this issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
output.append(line);
}
}
String processOutput = output.toString();
process.waitFor(10, TimeUnit.SECONDS);
if (process.exitValue() != 0) {
if (processOutput.length() > 0) {
String redactedOutput = redactInfo(processOutput);
if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) {
throw LoggingUtil.logCredentialUnavailableException(LOGGER, options,
new CredentialUnavailableException(
"AzureCliCredential authentication unavailable."
+ " Please run 'az login' to set up account. To further mitigate this"
+ " issue, please refer to the troubleshooting guidelines here at "
+ "https:
}
throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null));
} else {
throw LOGGER.logExceptionAsError(
new ClientAuthenticationException("Failed to invoke Azure CLI ", null));
}
}
LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the"
+ " response into an Access Token.");
Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class,
SerializerEncoding.JSON);
String accessToken = objectMap.get("accessToken");
String time = objectMap.get("expiresOn");
String timeToSecond = time.substring(0, time.indexOf("."));
String timeJoinedWithT = String.join("T", timeToSecond.split(" "));
OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
.atZone(ZoneId.systemDefault())
.toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC);
token = new AccessToken(accessToken, expiresOn);
} catch (IOException | InterruptedException e) {
throw LOGGER.logExceptionAsError(new IllegalStateException(e));
}
return token;
}
// Returns a directory that is safe to launch the CLI process from, or null when
// none can be determined. Windows: %SystemRoot%\system32; macOS/Linux: /bin/.
String getSafeWorkingDirectory() {
if (isWindowsPlatform()) {
// Read the environment at call time so a SystemRoot value set after class
// load is honored.
String windowsSystemRoot = System.getenv("SystemRoot");
if (CoreUtils.isNullOrEmpty(windowsSystemRoot)) {
return null;
}
return windowsSystemRoot + "\\system32";
} else {
return DEFAULT_MAC_LINUX_PATH;
}
}
boolean isWindowsPlatform() {
return System.getProperty("os.name").contains("Windows");
}
String redactInfo(String input) {
return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****");
}
abstract Mono<AccessToken> getTokenFromTargetManagedIdentity(TokenRequestContext tokenRequestContext);
HttpPipeline setupPipeline(HttpClient httpClient) {
List<HttpPipelinePolicy> policies = new ArrayList<>();
HttpLogOptions httpLogOptions = new HttpLogOptions();
String clientName = properties.getOrDefault(SDK_NAME, "UnknownName");
String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion");
Configuration buildConfiguration = Configuration.getGlobalConfiguration().clone();
userAgent = UserAgentUtil.toUserAgentString(null, clientName, clientVersion, buildConfiguration);
policies.add(new UserAgentPolicy(userAgent));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(new RetryPolicy());
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder().httpClient(httpClient)
.policies(policies.toArray(new HttpPipelinePolicy[0])).build();
}
void initializeHttpPipelineAdapter() {
    // Prefer a caller-supplied pipeline; otherwise derive one from the configured client,
    // and only fall back to a default client when no proxy options are present.
    HttpPipeline configuredPipeline = options.getHttpPipeline();
    if (configuredPipeline != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(configuredPipeline, options);
        return;
    }

    HttpClient configuredClient = options.getHttpClient();
    if (configuredClient != null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(configuredClient), options);
        return;
    }

    // NOTE(review): when only proxy options are set, no adapter is created here —
    // presumably a later code path handles the proxied case; confirm against callers.
    if (options.getProxyOptions() == null) {
        httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options);
    }
}
// Opens the certificate content: a configured file path takes precedence; otherwise
// the already-held in-memory stream is returned as-is.
private InputStream getCertificateInputStream() throws IOException {
    return (certificatePath != null)
        ? new BufferedInputStream(new FileInputStream(certificatePath))
        : certificate;
}
// Translates Azure ProxyOptions into a java.net.Proxy: both SOCKS variants map to
// Proxy.Type.SOCKS; HTTP and any other value map to Proxy.Type.HTTP.
private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) {
    ProxyOptions.Type type = options.getType();
    boolean isSocks = type == ProxyOptions.Type.SOCKS4 || type == ProxyOptions.Type.SOCKS5;
    return new Proxy(isSocks ? Proxy.Type.SOCKS : Proxy.Type.HTTP, options.getAddress());
}
} |
Do we have a test to cover this case? | private void deduplicate(List<String> changeList) {
Set<String> changeSet = new HashSet<>();
Iterator<String> iterator = changeList.iterator();
while (iterator.hasNext()) {
String change = iterator.next();
if (changeSet.contains(change)) {
iterator.remove();
} else {
changeSet.add(change);
}
}
} | iterator.remove(); | private void deduplicate(List<String> changeList) {
Set<String> changeSet = new HashSet<>();
Iterator<String> iterator = changeList.iterator();
while (iterator.hasNext()) {
String change = iterator.next();
if (changeSet.contains(change)) {
iterator.remove();
} else {
changeSet.add(change);
}
}
} | class got changed, two identical change logs will appear for this child class
deduplicateChangeLog();
}
private void deduplicateChangeLog() {
deduplicate(this.breakingChange);
deduplicate(this.newFeature);
} | class got changed, two identical change logs will appear for this child class
deduplicateChangeLog();
}
private void deduplicateChangeLog() {
deduplicate(this.breakingChange);
deduplicate(this.newFeature);
} |
Tested locally. Adding a unit test case is a little bit tricky, since it involves comparing two jar files (and there's no existing test case for the change tool). Maybe we can add a packaging phase to the test lifecycle; we can see if we need that in our future work... | private void deduplicate(List<String> changeList) {
Set<String> changeSet = new HashSet<>();
Iterator<String> iterator = changeList.iterator();
while (iterator.hasNext()) {
String change = iterator.next();
if (changeSet.contains(change)) {
iterator.remove();
} else {
changeSet.add(change);
}
}
} | iterator.remove(); | private void deduplicate(List<String> changeList) {
Set<String> changeSet = new HashSet<>();
Iterator<String> iterator = changeList.iterator();
while (iterator.hasNext()) {
String change = iterator.next();
if (changeSet.contains(change)) {
iterator.remove();
} else {
changeSet.add(change);
}
}
} | class got changed, two identical change logs will appear for this child class
deduplicateChangeLog();
}
private void deduplicateChangeLog() {
deduplicate(this.breakingChange);
deduplicate(this.newFeature);
} | class got changed, two identical change logs will appear for this child class
deduplicateChangeLog();
}
private void deduplicateChangeLog() {
deduplicate(this.breakingChange);
deduplicate(this.newFeature);
} |
We'd better add the following code on line 152: ```java if (client == null) { return; } ``` | protected void configureHttpLogOptions(T builder) {
ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client instanceof ClientOptionsProvider.HttpClientOptions) {
HttpLogOptions logOptions =
HTTP_LOG_OPTIONS_CONVERTER.convert(((ClientOptionsProvider.HttpClientOptions) client).getLogging());
consumeHttpLogOptions().accept(builder, logOptions);
} else {
LOGGER.debug("The client properties of an http-based client is of type {}", client.getClass().getName());
}
} | LOGGER.debug("The client properties of an http-based client is of type {}", client.getClass().getName()); | protected void configureHttpLogOptions(T builder) {
ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client == null) {
return;
}
if (client instanceof ClientOptionsProvider.HttpClientOptions) {
HttpLogOptions logOptions =
HTTP_LOG_OPTIONS_CONVERTER.convert(((ClientOptionsProvider.HttpClientOptions) client).getLogging());
consumeHttpLogOptions().accept(builder, logOptions);
} else {
LOGGER.debug("The client properties of an http-based client is of type {}", client.getClass().getName());
}
} | class AbstractAzureHttpClientBuilderFactory<T> extends AbstractAzureServiceClientBuilderFactory<T> {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAzureHttpClientBuilderFactory.class);
private final HttpClientOptions httpClientOptions = new HttpClientOptions();
private HttpClientProvider httpClientProvider = new DefaultHttpProvider();
private final List<HttpPipelinePolicy> httpPipelinePolicies = new ArrayList<>();
private HttpPipeline httpPipeline;
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link ClientOptions}.
* @return The consumer of how the {@link T} builder consume a {@link ClientOptions}.
*/
protected abstract BiConsumer<T, ClientOptions> consumeClientOptions();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpClient}.
* @return The consumer of how the {@link T} builder consume a {@link HttpClient}.
*/
protected abstract BiConsumer<T, HttpClient> consumeHttpClient();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpPipelinePolicy}.
* @return The consumer of how the {@link T} builder consume a {@link HttpPipelinePolicy}.
*/
protected abstract BiConsumer<T, HttpPipelinePolicy> consumeHttpPipelinePolicy();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpPipeline}.
* @return The consumer of how the {@link T} builder consume a {@link HttpPipeline}.
*/
protected abstract BiConsumer<T, HttpPipeline> consumeHttpPipeline();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpLogOptions}.
* @return The consumer of how the {@link T} builder consume a {@link HttpLogOptions}.
*/
protected abstract BiConsumer<T, HttpLogOptions> consumeHttpLogOptions();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link RetryPolicy}.
* @return The consumer of how the {@link T} builder consume a {@link RetryPolicy}.
*/
protected abstract BiConsumer<T, RetryPolicy> consumeRetryPolicy();
@Override
protected void configureCore(T builder) {
super.configureCore(builder);
configureHttpClient(builder);
configureHttpLogOptions(builder);
}
/**
* Configure the {@link HttpClient} to the {@link T} builder. If a {@link HttpPipeline} is provided to the factory,
* the pipeline will be set to the builder. Otherwise, a {@link HttpClient} will be created and together with the
* {@link HttpPipelinePolicy} set to the factory will be configured to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpClient(T builder) {
consumeClientOptions().accept(builder, httpClientOptions);
if (this.httpPipeline != null) {
consumeHttpPipeline().accept(builder, this.httpPipeline);
} else {
configureHttpHeaders(builder);
configureHttpTransportProperties(builder);
configureHttpPipelinePolicies(builder);
final HttpClient httpClient = getHttpClientProvider().createInstance(this.httpClientOptions);
consumeHttpClient().accept(builder, httpClient);
}
}
@Override
protected void configureProxy(T builder) {
final ProxyOptionsProvider.ProxyOptions proxy = getAzureProperties().getProxy();
if (proxy == null) {
return;
}
if (proxy instanceof ProxyOptionsProvider.HttpProxyOptions) {
ProxyOptions proxyOptions = HTTP_PROXY_CONVERTER.convert((ProxyOptionsProvider.HttpProxyOptions) proxy);
if (proxyOptions != null) {
this.httpClientOptions.setProxyOptions(proxyOptions);
} else {
LOGGER.debug("No HTTP proxy properties available.");
}
} else {
LOGGER.debug("The provided proxy options is not a ProxyOptionsProvider.HttpProxyOptions type.");
}
}
@Override
protected BiConsumer<T, String> consumeApplicationId() {
return (builder, id) -> this.httpClientOptions.setApplicationId(id);
}
/**
* Configure the {@link Header} that will be sent with the HTTP requests made of the HTTP-based sdk client.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpHeaders(T builder) {
this.httpClientOptions.setHeaders(getHeaders());
}
/**
* Configure the {@link HttpLogOptions} to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
/**
* Configure the HTTP transport properties to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpTransportProperties(T builder) {
final ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client == null) {
return;
}
final ClientOptionsProvider.HttpClientOptions properties;
if (client instanceof ClientOptionsProvider.HttpClientOptions) {
properties = (ClientOptionsProvider.HttpClientOptions) client;
httpClientOptions.setWriteTimeout(properties.getWriteTimeout());
httpClientOptions.responseTimeout(properties.getResponseTimeout());
httpClientOptions.readTimeout(properties.getReadTimeout());
httpClientOptions.setConnectTimeout(properties.getConnectTimeout());
httpClientOptions.setConnectionIdleTimeout(properties.getConnectionIdleTimeout());
httpClientOptions.setMaximumConnectionPoolSize(properties.getMaximumConnectionPoolSize());
}
}
@Override
protected void configureRetry(T builder) {
AzureProperties azureProperties = getAzureProperties();
RetryOptionsProvider.RetryOptions retry = null;
if (azureProperties instanceof RetryOptionsProvider) {
retry = ((RetryOptionsProvider) azureProperties).getRetry();
}
if (retry == null) {
return;
}
RetryOptions retryOptions = HTTP_RETRY_CONVERTER.convert(retry);
if (retryOptions == null) {
LOGGER.debug("No HTTP retry properties available.");
return;
}
consumeRetryPolicy().accept(builder, new RetryPolicy(retryOptions));
}
/**
* Configure the set of {@link HttpPipelinePolicy} added via this factory to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpPipelinePolicies(T builder) {
for (HttpPipelinePolicy policy : this.httpPipelinePolicies) {
consumeHttpPipelinePolicy().accept(builder, policy);
}
}
/**
* Extract the HTTP headers from the {@link AzureProperties}.
* @return The list of HTTP headers will be sent with the HTTP requests made of the HTTP-based sdk client.
*/
protected List<Header> getHeaders() {
final ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client == null) {
return null;
}
if (!(client instanceof ClientOptionsProvider.HttpClientOptions)) {
LOGGER.debug("The clientOptions passed in is not of ClientOptionsProvider.HttpClientOptions.");
return null;
}
ClientOptionsProvider.HttpClientOptions clientOptions = (ClientOptionsProvider.HttpClientOptions) client;
if (clientOptions.getHeaders() == null) {
return null;
}
return clientOptions.getHeaders()
.stream()
.map(h -> new Header(h.getName(), h.getValues()))
.collect(Collectors.toList());
}
/**
* Get a set of {@link HttpPipelinePolicy} configured via this factory.
*
* @return The list of the http pipeline policy.
*/
protected List<HttpPipelinePolicy> getHttpPipelinePolicies() {
return Collections.unmodifiableList(this.httpPipelinePolicies);
}
/**
* Adds a {@link HttpPipelinePolicy} to the set of existing policies.
*
* @param policy The {@link HttpPipelinePolicy policy} to be added.
*/
public void addHttpPipelinePolicy(HttpPipelinePolicy policy) {
this.httpPipelinePolicies.add(policy);
}
/**
* Set the {@link HttpPipeline}.
*
* @param httpPipeline The http pipeline.
*/
public void setHttpPipeline(HttpPipeline httpPipeline) {
this.httpPipeline = httpPipeline;
}
/**
* Get the {@link HttpClientProvider}.
*
* @return The http client provider.
*/
protected HttpClientProvider getHttpClientProvider() {
return this.httpClientProvider;
}
/**
* Get the {@link HttpClientOptions}.
*
* @return The http client options.
*/
protected HttpClientOptions getHttpClientOptions() {
return this.httpClientOptions;
}
/**
* Set the {@link HttpClientProvider}.
*
* @param httpClientProvider The http client provider.
*/
public void setHttpClientProvider(HttpClientProvider httpClientProvider) {
if (httpClientProvider != null) {
this.httpClientProvider = httpClientProvider;
}
}
} | class AbstractAzureHttpClientBuilderFactory<T> extends AbstractAzureServiceClientBuilderFactory<T> {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAzureHttpClientBuilderFactory.class);
private final HttpClientOptions httpClientOptions = new HttpClientOptions();
private HttpClientProvider httpClientProvider = new DefaultHttpProvider();
private final List<HttpPipelinePolicy> httpPipelinePolicies = new ArrayList<>();
private HttpPipeline httpPipeline;
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link ClientOptions}.
* @return The consumer of how the {@link T} builder consume a {@link ClientOptions}.
*/
protected abstract BiConsumer<T, ClientOptions> consumeClientOptions();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpClient}.
* @return The consumer of how the {@link T} builder consume a {@link HttpClient}.
*/
protected abstract BiConsumer<T, HttpClient> consumeHttpClient();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpPipelinePolicy}.
* @return The consumer of how the {@link T} builder consume a {@link HttpPipelinePolicy}.
*/
protected abstract BiConsumer<T, HttpPipelinePolicy> consumeHttpPipelinePolicy();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpPipeline}.
* @return The consumer of how the {@link T} builder consume a {@link HttpPipeline}.
*/
protected abstract BiConsumer<T, HttpPipeline> consumeHttpPipeline();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link HttpLogOptions}.
* @return The consumer of how the {@link T} builder consume a {@link HttpLogOptions}.
*/
protected abstract BiConsumer<T, HttpLogOptions> consumeHttpLogOptions();
/**
* Return a {@link BiConsumer} of how the {@link T} builder consume a {@link RetryPolicy}.
* @return The consumer of how the {@link T} builder consume a {@link RetryPolicy}.
*/
protected abstract BiConsumer<T, RetryPolicy> consumeRetryPolicy();
@Override
protected void configureCore(T builder) {
super.configureCore(builder);
configureHttpClient(builder);
configureHttpLogOptions(builder);
}
/**
* Configure the {@link HttpClient} to the {@link T} builder. If a {@link HttpPipeline} is provided to the factory,
* the pipeline will be set to the builder. Otherwise, a {@link HttpClient} will be created and together with the
* {@link HttpPipelinePolicy} set to the factory will be configured to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpClient(T builder) {
consumeClientOptions().accept(builder, httpClientOptions);
if (this.httpPipeline != null) {
consumeHttpPipeline().accept(builder, this.httpPipeline);
} else {
configureHttpHeaders(builder);
configureHttpTransportProperties(builder);
configureHttpPipelinePolicies(builder);
final HttpClient httpClient = getHttpClientProvider().createInstance(this.httpClientOptions);
consumeHttpClient().accept(builder, httpClient);
}
}
@Override
protected void configureProxy(T builder) {
final ProxyOptionsProvider.ProxyOptions proxy = getAzureProperties().getProxy();
if (proxy == null) {
return;
}
if (proxy instanceof ProxyOptionsProvider.HttpProxyOptions) {
ProxyOptions proxyOptions = HTTP_PROXY_CONVERTER.convert((ProxyOptionsProvider.HttpProxyOptions) proxy);
if (proxyOptions != null) {
this.httpClientOptions.setProxyOptions(proxyOptions);
} else {
LOGGER.debug("No HTTP proxy properties available.");
}
} else {
LOGGER.debug("The provided proxy options is not a ProxyOptionsProvider.HttpProxyOptions type.");
}
}
@Override
protected BiConsumer<T, String> consumeApplicationId() {
return (builder, id) -> this.httpClientOptions.setApplicationId(id);
}
/**
* Configure the {@link Header} that will be sent with the HTTP requests made of the HTTP-based sdk client.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpHeaders(T builder) {
this.httpClientOptions.setHeaders(getHeaders());
}
/**
* Configure the {@link HttpLogOptions} to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
/**
* Configure the HTTP transport properties to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpTransportProperties(T builder) {
final ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client == null) {
return;
}
final ClientOptionsProvider.HttpClientOptions properties;
if (client instanceof ClientOptionsProvider.HttpClientOptions) {
properties = (ClientOptionsProvider.HttpClientOptions) client;
httpClientOptions.setWriteTimeout(properties.getWriteTimeout());
httpClientOptions.responseTimeout(properties.getResponseTimeout());
httpClientOptions.readTimeout(properties.getReadTimeout());
httpClientOptions.setConnectTimeout(properties.getConnectTimeout());
httpClientOptions.setConnectionIdleTimeout(properties.getConnectionIdleTimeout());
httpClientOptions.setMaximumConnectionPoolSize(properties.getMaximumConnectionPoolSize());
}
}
@Override
protected void configureRetry(T builder) {
AzureProperties azureProperties = getAzureProperties();
RetryOptionsProvider.RetryOptions retry = null;
if (azureProperties instanceof RetryOptionsProvider) {
retry = ((RetryOptionsProvider) azureProperties).getRetry();
}
if (retry == null) {
return;
}
RetryOptions retryOptions = HTTP_RETRY_CONVERTER.convert(retry);
if (retryOptions == null) {
LOGGER.debug("No HTTP retry properties available.");
return;
}
consumeRetryPolicy().accept(builder, new RetryPolicy(retryOptions));
}
/**
* Configure the set of {@link HttpPipelinePolicy} added via this factory to the builder.
*
* @param builder The builder of the HTTP-based service client.
*/
protected void configureHttpPipelinePolicies(T builder) {
for (HttpPipelinePolicy policy : this.httpPipelinePolicies) {
consumeHttpPipelinePolicy().accept(builder, policy);
}
}
/**
* Extract the HTTP headers from the {@link AzureProperties}.
* @return The list of HTTP headers will be sent with the HTTP requests made of the HTTP-based sdk client.
*/
protected List<Header> getHeaders() {
final ClientOptionsProvider.ClientOptions client = getAzureProperties().getClient();
if (client == null) {
return null;
}
if (!(client instanceof ClientOptionsProvider.HttpClientOptions)) {
LOGGER.debug("The clientOptions passed in is not of ClientOptionsProvider.HttpClientOptions.");
return null;
}
ClientOptionsProvider.HttpClientOptions clientOptions = (ClientOptionsProvider.HttpClientOptions) client;
if (clientOptions.getHeaders() == null) {
return null;
}
return clientOptions.getHeaders()
.stream()
.map(h -> new Header(h.getName(), h.getValues()))
.collect(Collectors.toList());
}
/**
* Get a set of {@link HttpPipelinePolicy} configured via this factory.
*
* @return The list of the http pipeline policy.
*/
protected List<HttpPipelinePolicy> getHttpPipelinePolicies() {
return Collections.unmodifiableList(this.httpPipelinePolicies);
}
/**
* Adds a {@link HttpPipelinePolicy} to the set of existing policies.
*
* @param policy The {@link HttpPipelinePolicy policy} to be added.
*/
public void addHttpPipelinePolicy(HttpPipelinePolicy policy) {
this.httpPipelinePolicies.add(policy);
}
/**
* Set the {@link HttpPipeline}.
*
* @param httpPipeline The http pipeline.
*/
public void setHttpPipeline(HttpPipeline httpPipeline) {
this.httpPipeline = httpPipeline;
}
/**
* Get the {@link HttpClientProvider}.
*
* @return The http client provider.
*/
protected HttpClientProvider getHttpClientProvider() {
return this.httpClientProvider;
}
/**
* Get the {@link HttpClientOptions}.
*
* @return The http client options.
*/
protected HttpClientOptions getHttpClientOptions() {
return this.httpClientOptions;
}
/**
* Set the {@link HttpClientProvider}.
*
* @param httpClientProvider The http client provider.
*/
public void setHttpClientProvider(HttpClientProvider httpClientProvider) {
if (httpClientProvider != null) {
this.httpClientProvider = httpClientProvider;
}
}
} |
Is there a wiki/faq we can link to? How do I know the differences? | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider, namely around handling for maximum number of connections and"
+ "maximum number of pending connections.");
}
this.connectionProvider = connectionProvider;
return this;
} | + "maximum number of pending connections."); | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider. For more details see "
+ "https:
}
this.connectionProvider = connectionProvider;
return this;
} | class NettyAsyncHttpClientBuilder {
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
Configuration configuration = Configuration.getGlobalConfiguration();
DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
}
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link
* com.azure.core.http.HttpClient} backed by Reactor Netty.
*/
public NettyAsyncHttpClientBuilder() {
this.baseHttpClient = null;
}
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
* based on the provided Reactor Netty HttpClient.
*
* <!-- src_embed com.azure.core.http.netty.from-existing-http-client -->
* <pre>
* &
* reactor.netty.http.client.HttpClient baseHttpClient = reactor.netty.http.client.HttpClient.create&
* .wiretap&
* &
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.from-existing-http-client -->
*
* @param nettyHttpClient base reactor netty HttpClient
*/
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
* Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
* set in the builder at the time of the build method call.
*
* @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
* @throws IllegalStateException If the builder is configured to use an unknown proxy type.
*/
public com.azure.core.http.HttpClient build() {
HttpClient nettyHttpClient;
boolean addressResolverWasSetByBuilder = false;
if (this.baseHttpClient != null) {
nettyHttpClient = baseHttpClient;
} else if (this.connectionProvider != null) {
nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
} else {
nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
}
HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
nettyHttpClient = nettyHttpClient
.port(port)
.wiretap(enableWiretap)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
DEFAULT_CONNECT_TIMEOUT))
.httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = proxyOptions == null
? ProxyOptions.fromConfiguration(buildConfiguration, true)
: proxyOptions;
/*
* Only configure the custom authorization challenge handler and challenge holder when using an authenticated
* HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
*/
boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
AuthorizationChallengeHandler handler = useCustomProxyHandler
? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
: null;
AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
boolean addProxyHandler = false;
Pattern nonProxyHostsPattern = null;
if (eventLoopGroup != null) {
nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
}
if (buildProxyOptions != null) {
if (handler != null) {
addProxyHandler = true;
nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
? null
: Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
} else {
nettyHttpClient = nettyHttpClient.proxy(proxy ->
proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
.address(buildProxyOptions.getAddress())
.username(buildProxyOptions.getUsername())
.password(ignored -> buildProxyOptions.getPassword())
.nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
}
AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
if (resolver == null || addressResolverWasSetByBuilder) {
nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
}
}
return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
* Sets the connection provider.
*
* <p><strong>Code Sample</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
* <pre>
* &
* &
* &
* &
* ConnectionProvider connectionProvider = ConnectionProvider.builder&
* .maxConnections&
* .pendingAcquireMaxCount&
* .maxIdleTime&
* .build&
*
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .connectionProvider&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
*
* @param connectionProvider the connection provider
* @return the updated {@link NettyAsyncHttpClientBuilder} object.
*/
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
return this;
}
/**
* Sets the {@link ProxyOptions proxy options} that the client will use.
*
* @param proxyOptions The proxy configuration to use.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Enables the Netty wiretap feature.
*
* @param enableWiretap Flag indicating wiretap status
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
this.enableWiretap = enableWiretap;
return this;
}
/**
* Sets the port which this client should connect, which by default will be set to port 80.
*
* @param port The port to connect to.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder port(int port) {
this.port = port;
return this;
}
/**
* Sets the NIO event loop group that will be used to run IO loops.
*
* @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
* @deprecated deprecated in favor of {@link
*/
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
this.eventLoopGroup = nioEventLoopGroup;
return this;
}
/**
* Sets the IO event loop group that will be used to run IO loops.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder
* <pre>
* int threadCount = 5;
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder
*
* @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
this.eventLoopGroup = eventLoopGroup;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Disables deep copy of response {@link ByteBuffer} into a heap location that is managed by this client as opposed
* to the underlying netty library which may use direct buffer pool.
* <br>
* <b>
* Caution: Disabling this is not recommended as it can lead to data corruption if the downstream consumers of the
* response do not handle the byte buffers before netty releases them.
* </b>
* If copy is disabled, underlying Netty layer can potentially reclaim byte array backed by the {@code ByteBuffer}
* upon the return of {@code onNext()}. So, users should ensure they process the {@link ByteBuffer} immediately and
* then return.
*
* <!-- src_embed com.azure.core.http.netty.disabled-buffer-copy -->
* <pre>
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .port&
* .disableBufferCopy&
* .build&
*
* client.send&
* .flatMapMany&
* .map&
* .subscribe&
* </pre>
* <!-- end com.azure.core.http.netty.disabled-buffer-copy -->
*
* @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
* {@link ByteBuffer ByteBuffers}.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
this.disableBufferCopy = disableBufferCopy;
return this;
}
/**
* Sets the connection timeout for a request to be sent.
* <p>
* The connection timeout begins once the request attempts to connect to the remote host and finishes once the
* connection is resolved.
* <p>
* If {@code connectTimeout} is null either {@link Configuration
* 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will
* be used.
* <p>
* By default, the connection timeout is 10 seconds.
*
* @param connectTimeout Connect timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
this.connectTimeout = connectTimeout;
return this;
}
/**
* Sets the writing timeout for a request to be sent.
* <p>
* The writing timeout does not apply to the entire request but to the request being sent over the wire. For example
* a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last
* write tracker will update when each operation completes and the outbound buffer will be periodically checked to
* determine if it is still draining.
* <p>
* If {@code writeTimeout} is null either {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be
* used.
*
* @param writeTimeout Write operation timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
this.writeTimeout = writeTimeout;
return this;
}
/**
* Sets the response timeout duration used when waiting for a server to reply.
* <p>
* The response timeout begins once the request write completes and finishes once the first response read is
* triggered when the server response is received.
* <p>
* If {@code responseTimeout} is null either {@link Configuration
* 60-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied to the response. When applying the timeout the greatest of one millisecond and the value of {@code
* responseTimeout} will be used.
*
* @param responseTimeout Response timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
this.responseTimeout = responseTimeout;
return this;
}
/**
* Sets the read timeout duration used when reading the server response.
* <p>
* The read timeout begins once the first response read is triggered after the server response is received. This
* timeout triggers periodically but won't fire its operation if another read operation has completed between when
* the timeout is triggered and completes.
* <p>
* If {@code readTimeout} is null or {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be
* applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code
* readTimeout} will be used.
*
* @param readTimeout Read timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
return options != null && options.getUsername() != null && options.getType() == ProxyOptions.Type.HTTP;
}
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
switch (azureProxyType) {
case HTTP:
return ProxyProvider.Proxy.HTTP;
case SOCKS4:
return ProxyProvider.Proxy.SOCKS4;
case SOCKS5:
return ProxyProvider.Proxy.SOCKS5;
default:
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
}
}
/*
* Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
*
* If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero
* no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used.
*/
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
if (configuredTimeout == null) {
return defaultTimeout;
}
if (configuredTimeout.isZero() || configuredTimeout.isNegative()) {
return 0;
}
return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} | class NettyAsyncHttpClientBuilder {
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
Configuration configuration = Configuration.getGlobalConfiguration();
DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
}
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link
* com.azure.core.http.HttpClient} backed by Reactor Netty.
*/
public NettyAsyncHttpClientBuilder() {
this.baseHttpClient = null;
}
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
* based on the provided Reactor Netty HttpClient.
*
* <!-- src_embed com.azure.core.http.netty.from-existing-http-client -->
* <pre>
* &
* reactor.netty.http.client.HttpClient baseHttpClient = reactor.netty.http.client.HttpClient.create&
* .wiretap&
* &
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.from-existing-http-client -->
*
* @param nettyHttpClient base reactor netty HttpClient
*/
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
* Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
* set in the builder at the time of the build method call.
*
* @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
* @throws IllegalStateException If the builder is configured to use an unknown proxy type.
*/
public com.azure.core.http.HttpClient build() {
HttpClient nettyHttpClient;
boolean addressResolverWasSetByBuilder = false;
if (this.baseHttpClient != null) {
nettyHttpClient = baseHttpClient;
} else if (this.connectionProvider != null) {
nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
} else {
nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
}
HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
nettyHttpClient = nettyHttpClient
.port(port)
.wiretap(enableWiretap)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
DEFAULT_CONNECT_TIMEOUT))
.httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = proxyOptions == null
? ProxyOptions.fromConfiguration(buildConfiguration, true)
: proxyOptions;
/*
* Only configure the custom authorization challenge handler and challenge holder when using an authenticated
* HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
*/
boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
AuthorizationChallengeHandler handler = useCustomProxyHandler
? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
: null;
AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
boolean addProxyHandler = false;
Pattern nonProxyHostsPattern = null;
if (eventLoopGroup != null) {
nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
}
if (buildProxyOptions != null) {
if (handler != null) {
addProxyHandler = true;
nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
? null
: Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
} else {
nettyHttpClient = nettyHttpClient.proxy(proxy ->
proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
.address(buildProxyOptions.getAddress())
.username(buildProxyOptions.getUsername())
.password(ignored -> buildProxyOptions.getPassword())
.nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
}
AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
if (resolver == null || addressResolverWasSetByBuilder) {
nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
}
}
return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
* Sets the connection provider.
*
* <p><strong>Code Sample</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
* <pre>
* &
* &
* &
* &
* ConnectionProvider connectionProvider = ConnectionProvider.builder&
* .maxConnections&
* .pendingAcquireMaxCount&
* .maxIdleTime&
* .build&
*
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .connectionProvider&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
*
* @param connectionProvider the connection provider
* @return the updated {@link NettyAsyncHttpClientBuilder} object.
*/
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
return this;
}
/**
* Sets the {@link ProxyOptions proxy options} that the client will use.
*
* @param proxyOptions The proxy configuration to use.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Enables the Netty wiretap feature.
*
* @param enableWiretap Flag indicating wiretap status
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
this.enableWiretap = enableWiretap;
return this;
}
/**
* Sets the port which this client should connect, which by default will be set to port 80.
*
* @param port The port to connect to.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder port(int port) {
this.port = port;
return this;
}
/**
* Sets the NIO event loop group that will be used to run IO loops.
*
* @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
* @deprecated deprecated in favor of {@link
*/
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
this.eventLoopGroup = nioEventLoopGroup;
return this;
}
/**
* Sets the IO event loop group that will be used to run IO loops.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder
* <pre>
* int threadCount = 5;
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder
*
* @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
this.eventLoopGroup = eventLoopGroup;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Disables deep copy of response {@link ByteBuffer} into a heap location that is managed by this client as opposed
* to the underlying netty library which may use direct buffer pool.
* <br>
* <b>
* Caution: Disabling this is not recommended as it can lead to data corruption if the downstream consumers of the
* response do not handle the byte buffers before netty releases them.
* </b>
* If copy is disabled, underlying Netty layer can potentially reclaim byte array backed by the {@code ByteBuffer}
* upon the return of {@code onNext()}. So, users should ensure they process the {@link ByteBuffer} immediately and
* then return.
*
* <!-- src_embed com.azure.core.http.netty.disabled-buffer-copy -->
* <pre>
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .port&
* .disableBufferCopy&
* .build&
*
* client.send&
* .flatMapMany&
* .map&
* .subscribe&
* </pre>
* <!-- end com.azure.core.http.netty.disabled-buffer-copy -->
*
* @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
* {@link ByteBuffer ByteBuffers}.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
this.disableBufferCopy = disableBufferCopy;
return this;
}
/**
* Sets the connection timeout for a request to be sent.
* <p>
* The connection timeout begins once the request attempts to connect to the remote host and finishes once the
* connection is resolved.
* <p>
* If {@code connectTimeout} is null either {@link Configuration
* 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will
* be used.
* <p>
* By default, the connection timeout is 10 seconds.
*
* @param connectTimeout Connect timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
this.connectTimeout = connectTimeout;
return this;
}
/**
* Sets the writing timeout for a request to be sent.
* <p>
* The writing timeout does not apply to the entire request but to the request being sent over the wire. For example
* a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last
* write tracker will update when each operation completes and the outbound buffer will be periodically checked to
* determine if it is still draining.
* <p>
* If {@code writeTimeout} is null either {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be
* used.
*
* @param writeTimeout Write operation timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
this.writeTimeout = writeTimeout;
return this;
}
/**
* Sets the response timeout duration used when waiting for a server to reply.
* <p>
* The response timeout begins once the request write completes and finishes once the first response read is
* triggered when the server response is received.
* <p>
* If {@code responseTimeout} is null either {@link Configuration
* 60-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied to the response. When applying the timeout the greatest of one millisecond and the value of {@code
* responseTimeout} will be used.
*
* @param responseTimeout Response timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
this.responseTimeout = responseTimeout;
return this;
}
/**
* Sets the read timeout duration used when reading the server response.
* <p>
* The read timeout begins once the first response read is triggered after the server response is received. This
* timeout triggers periodically but won't fire its operation if another read operation has completed between when
* the timeout is triggered and completes.
* <p>
* If {@code readTimeout} is null or {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be
* applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code
* readTimeout} will be used.
*
* @param readTimeout Read timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
return options != null && options.getUsername() != null && options.getType() == ProxyOptions.Type.HTTP;
}
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
switch (azureProxyType) {
case HTTP:
return ProxyProvider.Proxy.HTTP;
case SOCKS4:
return ProxyProvider.Proxy.SOCKS4;
case SOCKS5:
return ProxyProvider.Proxy.SOCKS5;
default:
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
}
}
/*
* Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
*
* If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero
* no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used.
*/
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
if (configuredTimeout == null) {
return defaultTimeout;
}
if (configuredTimeout.isZero() || configuredTimeout.isNegative()) {
return 0;
}
return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} |
Added a link out to a wiki pages | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider, namely around handling for maximum number of connections and"
+ "maximum number of pending connections.");
}
this.connectionProvider = connectionProvider;
return this;
} | + "maximum number of pending connections."); | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider. For more details see "
+ "https:
}
this.connectionProvider = connectionProvider;
return this;
} | class NettyAsyncHttpClientBuilder {
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
Configuration configuration = Configuration.getGlobalConfiguration();
DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
}
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link
* com.azure.core.http.HttpClient} backed by Reactor Netty.
*/
public NettyAsyncHttpClientBuilder() {
this.baseHttpClient = null;
}
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
* based on the provided Reactor Netty HttpClient.
*
* <!-- src_embed com.azure.core.http.netty.from-existing-http-client -->
* <pre>
* &
* reactor.netty.http.client.HttpClient baseHttpClient = reactor.netty.http.client.HttpClient.create&
* .wiretap&
* &
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.from-existing-http-client -->
*
* @param nettyHttpClient base reactor netty HttpClient
*/
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
* Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
* set in the builder at the time of the build method call.
*
* @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
* @throws IllegalStateException If the builder is configured to use an unknown proxy type.
*/
public com.azure.core.http.HttpClient build() {
HttpClient nettyHttpClient;
boolean addressResolverWasSetByBuilder = false;
if (this.baseHttpClient != null) {
nettyHttpClient = baseHttpClient;
} else if (this.connectionProvider != null) {
nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
} else {
nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
}
HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
nettyHttpClient = nettyHttpClient
.port(port)
.wiretap(enableWiretap)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
DEFAULT_CONNECT_TIMEOUT))
.httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = proxyOptions == null
? ProxyOptions.fromConfiguration(buildConfiguration, true)
: proxyOptions;
/*
* Only configure the custom authorization challenge handler and challenge holder when using an authenticated
* HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
*/
boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
AuthorizationChallengeHandler handler = useCustomProxyHandler
? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
: null;
AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
boolean addProxyHandler = false;
Pattern nonProxyHostsPattern = null;
if (eventLoopGroup != null) {
nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
}
if (buildProxyOptions != null) {
if (handler != null) {
addProxyHandler = true;
nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
? null
: Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
} else {
nettyHttpClient = nettyHttpClient.proxy(proxy ->
proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
.address(buildProxyOptions.getAddress())
.username(buildProxyOptions.getUsername())
.password(ignored -> buildProxyOptions.getPassword())
.nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
}
AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
if (resolver == null || addressResolverWasSetByBuilder) {
nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
}
}
return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
* Sets the connection provider.
*
* <p><strong>Code Sample</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
* <pre>
* &
* &
* &
* &
* ConnectionProvider connectionProvider = ConnectionProvider.builder&
* .maxConnections&
* .pendingAcquireMaxCount&
* .maxIdleTime&
* .build&
*
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .connectionProvider&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
*
* @param connectionProvider the connection provider
* @return the updated {@link NettyAsyncHttpClientBuilder} object.
*/
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
return this;
}
/**
* Sets the {@link ProxyOptions proxy options} that the client will use.
*
* @param proxyOptions The proxy configuration to use.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Enables the Netty wiretap feature.
*
* @param enableWiretap Flag indicating wiretap status
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
this.enableWiretap = enableWiretap;
return this;
}
/**
* Sets the port which this client should connect, which by default will be set to port 80.
*
* @param port The port to connect to.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder port(int port) {
this.port = port;
return this;
}
/**
* Sets the NIO event loop group that will be used to run IO loops.
*
* @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
* @deprecated deprecated in favor of {@link
*/
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
this.eventLoopGroup = nioEventLoopGroup;
return this;
}
/**
* Sets the IO event loop group that will be used to run IO loops.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder
* <pre>
* int threadCount = 5;
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder
*
* @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
this.eventLoopGroup = eventLoopGroup;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Disables deep copy of response {@link ByteBuffer} into a heap location that is managed by this client as opposed
* to the underlying netty library which may use direct buffer pool.
* <br>
* <b>
* Caution: Disabling this is not recommended as it can lead to data corruption if the downstream consumers of the
* response do not handle the byte buffers before netty releases them.
* </b>
* If copy is disabled, underlying Netty layer can potentially reclaim byte array backed by the {@code ByteBuffer}
* upon the return of {@code onNext()}. So, users should ensure they process the {@link ByteBuffer} immediately and
* then return.
*
* <!-- src_embed com.azure.core.http.netty.disabled-buffer-copy -->
* <pre>
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .port&
* .disableBufferCopy&
* .build&
*
* client.send&
* .flatMapMany&
* .map&
* .subscribe&
* </pre>
* <!-- end com.azure.core.http.netty.disabled-buffer-copy -->
*
* @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
* {@link ByteBuffer ByteBuffers}.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
this.disableBufferCopy = disableBufferCopy;
return this;
}
/**
 * Sets the connection timeout for a request to be sent.
 * <p>
 * The connection timeout begins once the request attempts to connect to the remote host and finishes once the
 * connection is resolved.
 * <p>
 * If {@code connectTimeout} is null, a default of 10 seconds is used (overridable through the environment's
 * request-connect-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code connectTimeout} is used.
 *
 * @param connectTimeout Connect timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
    this.connectTimeout = connectTimeout;
    return this;
}
/**
 * Sets the writing timeout for a request to be sent.
 * <p>
 * The writing timeout does not apply to the entire request but to each write of the request over the wire. For
 * example, a request body which emits {@code 10} {@code 8KB} buffers triggers {@code 10} write operations; the
 * last-write tracker updates as each operation completes and the outbound buffer is periodically checked to
 * determine if it is still draining.
 * <p>
 * If {@code writeTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-write-timeout property). A {@link Duration} less than or equal to zero disables the write timeout. When
 * a timeout is applied, the greater of one millisecond and {@code writeTimeout} is used.
 *
 * @param writeTimeout Write operation timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
    this.writeTimeout = writeTimeout;
    return this;
}
/**
 * Sets the response timeout duration used when waiting for a server to reply.
 * <p>
 * The response timeout begins once the request write completes and finishes once the first response read is
 * triggered when the server response is received.
 * <p>
 * If {@code responseTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-response-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code responseTimeout} is used.
 *
 * @param responseTimeout Response timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
    this.responseTimeout = responseTimeout;
    return this;
}
/**
 * Sets the read timeout duration used when reading the server response.
 * <p>
 * The read timeout begins once the first response read is triggered after the server response is received. This
 * timeout triggers periodically but won't fire its operation if another read operation has completed between when
 * the timeout is triggered and completes.
 * <p>
 * If {@code readTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-read-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code readTimeout} is used.
 *
 * @param readTimeout Read timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
    this.readTimeout = readTimeout;
    return this;
}
/*
 * A custom proxy handler is only required for an authenticated (username present) HTTP proxy; SOCKS4, SOCKS5,
 * and anonymous HTTP proxies use Netty's built-in handlers.
 */
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
    if (options == null || options.getUsername() == null) {
        return false;
    }
    return options.getType() == ProxyOptions.Type.HTTP;
}
/*
 * Maps the azure-core ProxyOptions.Type to the equivalent Reactor Netty ProxyProvider.Proxy value.
 *
 * Any unrecognized enum value is rejected with an IllegalArgumentException (logged through LOGGER).
 */
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
    switch (azureProxyType) {
        case HTTP:
            return ProxyProvider.Proxy.HTTP;
        case SOCKS4:
            return ProxyProvider.Proxy.SOCKS4;
        case SOCKS5:
            return ProxyProvider.Proxy.SOCKS5;
        default:
            throw LOGGER.logExceptionAsError(
                new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
    }
}
/*
 * Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
 *
 * A null timeout selects the default. A zero or negative timeout disables the timeout entirely (returns 0).
 * Any other timeout is clamped up to at least one millisecond.
 */
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
    if (configuredTimeout == null) {
        return defaultTimeout;
    }
    boolean timeoutDisabled = configuredTimeout.isZero() || configuredTimeout.isNegative();
    return timeoutDisabled ? 0 : Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} | class NettyAsyncHttpClientBuilder {
// Smallest timeout ever applied; sub-millisecond configured values are rounded up to this.
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
// Default timeouts in milliseconds, resolved once from the global configuration in the static initializer below.
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
    // Defaults: 10 seconds to connect, 60 seconds each for write, response, and read. Each may be overridden
    // through the corresponding PROPERTY_AZURE_REQUEST_* configuration value.
    Configuration configuration = Configuration.getGlobalConfiguration();
    DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
        PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
    DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
        Duration.ofSeconds(60), LOGGER).toMillis();
    DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
        PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
    DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
        Duration.ofSeconds(60), LOGGER).toMillis();
}
// When non-null, build() layers configuration onto this pre-existing Reactor Netty client instead of creating one.
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
 * Creates a new builder instance, where a builder is capable of generating multiple instances of
 * {@link com.azure.core.http.HttpClient} backed by Reactor Netty.
 */
public NettyAsyncHttpClientBuilder() {
    // No base client; build() will create a fresh Reactor Netty HttpClient.
    this.baseHttpClient = null;
}
/**
 * Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
 * based on the provided Reactor Netty HttpClient. Configuration set on this builder is layered on top of the
 * provided client in {@link #build()}.
 *
 * @param nettyHttpClient base Reactor Netty HttpClient
 * @throws NullPointerException If {@code nettyHttpClient} is null.
 */
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
    this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
 * Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
 * set in the builder at the time of the build method call.
 *
 * @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
 * @throws IllegalStateException If the builder is configured to use an unknown proxy type.
 */
public com.azure.core.http.HttpClient build() {
    HttpClient nettyHttpClient;
    // Tracks whether this builder installed the default address resolver, so it can be swapped for a no-op
    // resolver later when a proxy is in play (host resolution must then happen at the proxy).
    boolean addressResolverWasSetByBuilder = false;
    if (this.baseHttpClient != null) {
        nettyHttpClient = baseHttpClient;
    } else if (this.connectionProvider != null) {
        nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
        addressResolverWasSetByBuilder = true;
    } else {
        nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
        addressResolverWasSetByBuilder = true;
    }
    // Capture the current decoder spec so header validation can be disabled without discarding any other
    // decoder settings the base client may carry.
    HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
    nettyHttpClient = nettyHttpClient
        .port(port)
        .wiretap(enableWiretap)
        .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
            DEFAULT_CONNECT_TIMEOUT))
        .httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
    Configuration buildConfiguration = (configuration == null)
        ? Configuration.getGlobalConfiguration()
        : configuration;
    // An explicitly set proxy wins; otherwise attempt to load proxy settings from the configuration.
    ProxyOptions buildProxyOptions = proxyOptions == null
        ? ProxyOptions.fromConfiguration(buildConfiguration, true)
        : proxyOptions;
    /*
     * Only configure the custom authorization challenge handler and challenge holder when using an authenticated
     * HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
     */
    boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
    AuthorizationChallengeHandler handler = useCustomProxyHandler
        ? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
        : null;
    AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
    boolean addProxyHandler = false;
    Pattern nonProxyHostsPattern = null;
    if (eventLoopGroup != null) {
        nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
    }
    if (buildProxyOptions != null) {
        if (handler != null) {
            // Authenticated HTTP proxy: signal NettyAsyncHttpClient to add its own proxy handling, and
            // precompile the non-proxy-hosts pattern here.
            addProxyHandler = true;
            nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
                ? null
                : Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
        } else {
            // Anonymous HTTP or SOCKS proxy: delegate to Reactor Netty's built-in proxy support.
            nettyHttpClient = nettyHttpClient.proxy(proxy ->
                proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
                    .address(buildProxyOptions.getAddress())
                    .username(buildProxyOptions.getUsername())
                    .password(ignored -> buildProxyOptions.getPassword())
                    .nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
        }
        // With a proxy configured, replace a builder-installed (or absent) resolver with the no-op resolver so
        // host names are resolved by the proxy, not locally.
        AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
        if (resolver == null || addressResolverWasSetByBuilder) {
            nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
        }
    }
    return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
        getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
        getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
        nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
 * Sets the connection provider used to create a new Reactor Netty HttpClient in {@link #build()}. Ignored when a
 * base HttpClient was supplied to the builder's constructor.
 *
 * @param connectionProvider the connection provider
 * @return the updated {@link NettyAsyncHttpClientBuilder} object.
 */
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
    this.connectionProvider = connectionProvider;
    return this;
}
/**
 * Sets the {@link ProxyOptions proxy options} that the client will use.
 * <p>
 * When left null, {@link #build()} falls back to loading proxy settings from the configuration store.
 *
 * @param proxyOptions The proxy configuration to use.
 * @return the updated NettyAsyncHttpClientBuilder object.
 */
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
    this.proxyOptions = proxyOptions;
    return this;
}
/**
 * Enables the Netty wiretap feature. The flag is forwarded to the Reactor Netty client in {@link #build()}.
 *
 * @param enableWiretap Flag indicating wiretap status
 * @return the updated NettyAsyncHttpClientBuilder object.
 */
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
    this.enableWiretap = enableWiretap;
    return this;
}
/**
 * Sets the port which this client should connect, which by default will be set to port 80.
 *
 * @param port The port to connect to.
 * @return the updated NettyAsyncHttpClientBuilder object.
 */
public NettyAsyncHttpClientBuilder port(int port) {
    this.port = port;
    return this;
}
/**
 * Sets the NIO event loop group that will be used to run IO loops.
 *
 * @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
 * @return the updated NettyAsyncHttpClientBuilder object.
 * @deprecated deprecated in favor of {@link #eventLoopGroup(EventLoopGroup)}, which accepts any
 * {@link EventLoopGroup} implementation and stores it in the same field.
 */
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
    this.eventLoopGroup = nioEventLoopGroup;
    return this;
}
/**
 * Sets the IO event loop group that will be used to run IO loops. In {@link #build()} the client is configured to
 * run on this group when one has been supplied.
 *
 * @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
 * @return the updated NettyAsyncHttpClientBuilder object.
 */
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
    this.eventLoopGroup = eventLoopGroup;
    return this;
}
/**
 * Sets the configuration store that is used during construction of the HTTP client.
 * <p>
 * When no configuration is set, the global configuration store is consulted at build time (for example, when
 * resolving proxy settings).
 *
 * @param configuration The configuration store used while building the client.
 * @return The updated NettyAsyncHttpClientBuilder object.
 */
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
    this.configuration = configuration;
    return this;
}
/**
 * Disables the deep copy of response {@link ByteBuffer ByteBuffers} into a heap location managed by this client,
 * leaving the buffers in the pool owned by the underlying Netty library (which may use a direct buffer pool).
 * <p>
 * <b>Caution:</b> disabling the copy is not recommended as it can lead to data corruption if downstream consumers
 * of the response do not handle the byte buffers before Netty releases them. When the copy is disabled, the Netty
 * layer can reclaim the byte array backing a {@code ByteBuffer} as soon as {@code onNext()} returns, so consumers
 * must process each {@link ByteBuffer} immediately and then return.
 *
 * @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
 * {@link ByteBuffer ByteBuffers}.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
    this.disableBufferCopy = disableBufferCopy;
    return this;
}
/**
 * Sets the connection timeout for a request to be sent.
 * <p>
 * The connection timeout begins once the request attempts to connect to the remote host and finishes once the
 * connection is resolved.
 * <p>
 * If {@code connectTimeout} is null, a default of 10 seconds is used (overridable through the environment's
 * request-connect-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code connectTimeout} is used.
 *
 * @param connectTimeout Connect timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
    this.connectTimeout = connectTimeout;
    return this;
}
/**
 * Sets the writing timeout for a request to be sent.
 * <p>
 * The writing timeout does not apply to the entire request but to each write of the request over the wire. For
 * example, a request body which emits {@code 10} {@code 8KB} buffers triggers {@code 10} write operations; the
 * last-write tracker updates as each operation completes and the outbound buffer is periodically checked to
 * determine if it is still draining.
 * <p>
 * If {@code writeTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-write-timeout property). A {@link Duration} less than or equal to zero disables the write timeout. When
 * a timeout is applied, the greater of one millisecond and {@code writeTimeout} is used.
 *
 * @param writeTimeout Write operation timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
    this.writeTimeout = writeTimeout;
    return this;
}
/**
 * Sets the response timeout duration used when waiting for a server to reply.
 * <p>
 * The response timeout begins once the request write completes and finishes once the first response read is
 * triggered when the server response is received.
 * <p>
 * If {@code responseTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-response-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code responseTimeout} is used.
 *
 * @param responseTimeout Response timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
    this.responseTimeout = responseTimeout;
    return this;
}
/**
 * Sets the read timeout duration used when reading the server response.
 * <p>
 * The read timeout begins once the first response read is triggered after the server response is received. This
 * timeout triggers periodically but won't fire its operation if another read operation has completed between when
 * the timeout is triggered and completes.
 * <p>
 * If {@code readTimeout} is null, a default of 60 seconds is used (overridable through the environment's
 * request-read-timeout property). A {@link Duration} less than or equal to zero disables the timeout. When a
 * timeout is applied, the greater of one millisecond and {@code readTimeout} is used.
 *
 * @param readTimeout Read timeout duration.
 * @return The updated {@link NettyAsyncHttpClientBuilder} object.
 */
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
    this.readTimeout = readTimeout;
    return this;
}
/*
 * A custom proxy handler is only required for an authenticated (username present) HTTP proxy; SOCKS4, SOCKS5,
 * and anonymous HTTP proxies use Netty's built-in handlers.
 */
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
    if (options == null || options.getUsername() == null) {
        return false;
    }
    return options.getType() == ProxyOptions.Type.HTTP;
}
/*
 * Maps the azure-core ProxyOptions.Type to the equivalent Reactor Netty ProxyProvider.Proxy value.
 *
 * Any unrecognized enum value is rejected with an IllegalArgumentException (logged through LOGGER).
 */
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
    switch (azureProxyType) {
        case HTTP:
            return ProxyProvider.Proxy.HTTP;
        case SOCKS4:
            return ProxyProvider.Proxy.SOCKS4;
        case SOCKS5:
            return ProxyProvider.Proxy.SOCKS5;
        default:
            throw LOGGER.logExceptionAsError(
                new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
    }
}
/*
 * Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
 *
 * A null timeout selects the default. A zero or negative timeout disables the timeout entirely (returns 0).
 * Any other timeout is clamped up to at least one millisecond.
 */
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
    if (configuredTimeout == null) {
        return defaultTimeout;
    }
    boolean timeoutDisabled = configuredTimeout.isZero() || configuredTimeout.isNegative();
    return timeoutDisabled ? 0 : Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} |
It may be better to add a generalized helper method to compare expected documents vs actual documents where the API takes two `Map<String, T>` where `T` is the document type (in this case `NonNullableModel` and in the other change `Map<String, Object>`). Where the `String` key is the document key, which always is a String. | public void canRoundTripNonNullableValueTypes() {
client = setupClient(this::createIndexWithNonNullableTypes);
Date startEpoch = Date.from(Instant.ofEpochMilli(1275346800000L));
NonNullableModel doc1 = new NonNullableModel()
.key("132")
.count(3)
.isEnabled(true)
.rating(5)
.ratio(3.25)
.startDate(new Date(startEpoch.getTime()))
.endDate(new Date(startEpoch.getTime()))
.topLevelBucket(new Bucket().bucketName("A").count(12))
.buckets(new Bucket[]{new Bucket().bucketName("B").count(20), new Bucket().bucketName("C").count(7)});
NonNullableModel doc2 = new NonNullableModel().key("456").buckets(new Bucket[]{});
uploadDocuments(client, Arrays.asList(doc1, doc2));
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertTrue(iterator.hasNext());
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
try {
assertObjectEquals(doc1, result.getValue().get(0).getDocument(NonNullableModel.class), true);
assertObjectEquals(doc2, result.getValue().get(1).getDocument(NonNullableModel.class), true);
} catch (AssertionError e) {
assertObjectEquals(doc2, result.getValue().get(0).getDocument(NonNullableModel.class), true);
assertObjectEquals(doc1, result.getValue().get(1).getDocument(NonNullableModel.class), true);
}
} | try { | public void canRoundTripNonNullableValueTypes() {
client = setupClient(this::createIndexWithNonNullableTypes);
Date startEpoch = Date.from(Instant.ofEpochMilli(1275346800000L));
NonNullableModel doc1 = new NonNullableModel()
.key("132")
.count(3)
.isEnabled(true)
.rating(5)
.ratio(3.25)
.startDate(new Date(startEpoch.getTime()))
.endDate(new Date(startEpoch.getTime()))
.topLevelBucket(new Bucket().bucketName("A").count(12))
.buckets(new Bucket[]{new Bucket().bucketName("B").count(20), new Bucket().bucketName("C").count(7)});
NonNullableModel doc2 = new NonNullableModel().key("456").buckets(new Bucket[]{});
uploadDocuments(client, Arrays.asList(doc1, doc2));
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertTrue(iterator.hasNext());
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
List<NonNullableModel> expectedDocuments = new ArrayList<>();
expectedDocuments.add(doc1); expectedDocuments.add(doc2);
List<NonNullableModel> actualDocuments = new ArrayList<>();
result.getValue().forEach(val -> actualDocuments.add(val.getDocument(NonNullableModel.class)));
assertTrue(equalDocumentSets(expectedDocuments, actualDocuments));
} | class SearchSyncTests extends SearchTestBase {
// Indexes created by individual tests; deleted in afterTest().
private final List<String> indexesToDelete = new ArrayList<>();
// Name of a synonym map a test created, or empty when no synonym-map cleanup is needed.
private String synonymMapToDelete = "";
// Shared, read-only index reused by tests that do not mutate documents.
private static final String INDEX_NAME = "azsearch-search-shared-instance";
private static SearchIndexClient searchIndexClient;
private SearchClient client;
// Creates the shared index once for the whole class; skipped in PLAYBACK mode where no live service is touched.
@BeforeAll
public static void setupClass() {
    TestBase.setupClass();
    if (TEST_MODE == TestMode.PLAYBACK) {
        return;
    }
    searchIndexClient = setupSharedIndex(INDEX_NAME);
}
// Cleans up every index (and any synonym map) the just-finished test created.
@Override
protected void afterTest() {
    super.afterTest();
    SearchIndexClient serviceClient = getSearchIndexClientBuilder().buildClient();
    for (String index : indexesToDelete) {
        serviceClient.deleteIndex(index);
    }
    if (!CoreUtils.isNullOrEmpty(synonymMapToDelete)) {
        serviceClient.deleteSynonymMap(synonymMapToDelete);
        // Give the live service time to finish the synonym map deletion before the next test starts.
        sleepIfRunningAgainstService(5000);
    }
}
// Deletes the shared index after all tests have run; skipped in PLAYBACK mode.
@AfterAll
protected static void cleanupClass() {
    if (TEST_MODE != TestMode.PLAYBACK) {
        searchIndexClient.deleteIndex(INDEX_NAME);
    }
}
// Creates a fresh index via the supplier, registers it for cleanup, and returns a client bound to it.
private SearchClient setupClient(Supplier<String> indexSupplier) {
    String indexName = indexSupplier.get();
    indexesToDelete.add(indexName);
    return getSearchClientBuilder(indexName).buildClient();
}
// An invalid $filter expression must surface as HTTP 400 with the service's syntax-error message.
@Test
public void searchThrowsWhenRequestIsMalformed() {
    SearchOptions invalidSearchOptions = new SearchOptions().setFilter("This is not a valid filter.");
    assertHttpResponseException(
        () -> search("*", invalidSearchOptions),
        HttpURLConnection.HTTP_BAD_REQUEST,
        "Invalid expression: Syntax error at position 7 in 'This is not a valid filter.'");
}
// Unescaped special characters inside a FULL-syntax regex query must surface as HTTP 400.
@Test
public void searchThrowsWhenSpecialCharInRegexIsUnescaped() {
    SearchOptions invalidSearchOptions = new SearchOptions().setQueryType(QueryType.FULL);
    assertHttpResponseException(
        () -> search("/.*/.*/", invalidSearchOptions),
        HttpURLConnection.HTTP_BAD_REQUEST,
        "Failed to parse query string at line 1, column 8.");
}
// Executes a search against the shared index and pulls the first page so any service error is actually raised.
private void search(String searchText, SearchOptions searchOptions) {
    getSearchClientBuilder(INDEX_NAME).buildClient().search(searchText, searchOptions, Context.NONE)
        .iterableByPage()
        .iterator()
        .next();
}
// Uploads hotels and verifies they round-trip as dynamic SearchDocuments, with default score 1 and no highlights.
@Test
public void canSearchDynamicDocuments() {
    client = setupClient(this::createHotelIndex);
    List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
    SearchPagedIterable searchResults = client.search("*");
    assertNotNull(searchResults);
    // No count/coverage/facets were requested, so none should be present.
    assertNull(searchResults.getTotalCount());
    assertNull(searchResults.getCoverage());
    assertNull(searchResults.getFacets());
    Iterator<SearchPagedResponse> iterator = searchResults.iterableByPage().iterator();
    List<Map<String, Object>> actualResults = new ArrayList<>(hotels.size());
    while (iterator.hasNext()) {
        SearchPagedResponse result = iterator.next();
        assertNotNull(result.getValue());
        result.getElements().forEach(item -> {
            assertEquals(1, item.getScore(), 0);
            assertNull(item.getHighlights());
            actualResults.add(item.getDocument(SearchDocument.class));
        });
    }
    assertEquals(hotels.size(), actualResults.size());
    // Sort numerically by HotelId so the comparison is order-independent.
    actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.get("HotelId").toString())));
    assertTrue(compareResults(actualResults, hotels));
}
// Verifies continuation: 100 documents come back as two pages of 50, only the first carrying a continuation token.
@Test
public void canContinueSearch() {
    client = setupClient(this::createHotelIndex);
    List<Map<String, Object>> hotels = createHotelsList(100);
    uploadDocuments(client, hotels);
    SearchOptions searchOptions = new SearchOptions().setSelect("HotelId")
        .setOrderBy("HotelId asc");
    List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
        .collect(Collectors.toList());
    SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    SearchPagedResponse firstPage = iterator.next();
    assertEquals(50, firstPage.getValue().size());
    assertListEqualHotelIds(expectedHotelIds.subList(0, 50), firstPage.getValue());
    assertNotNull(firstPage.getContinuationToken());
    SearchPagedResponse secondPage = iterator.next();
    assertEquals(50, secondPage.getValue().size());
    assertListEqualHotelIds(expectedHotelIds.subList(50, 100), secondPage.getValue());
    // Last page: nothing left to continue to.
    assertNull(secondPage.getContinuationToken());
}
// Verifies continuation with $top=2000 over 3000 documents: two pages of 1000 satisfy the requested top.
@Test
public void canContinueSearchWithTop() {
    client = setupClient(this::createHotelIndex);
    List<Map<String, Object>> hotels = createHotelsList(3000);
    uploadDocuments(client, hotels);
    SearchOptions searchOptions = new SearchOptions()
        .setTop(2000)
        .setSelect("HotelId")
        .setOrderBy("HotelId asc");
    List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
        .collect(Collectors.toList());
    SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    SearchPagedResponse firstPage = iterator.next();
    assertEquals(1000, firstPage.getValue().size());
    assertListEqualHotelIds(expectedHotelIds.subList(0, 1000), firstPage.getValue());
    assertNotNull(firstPage.getContinuationToken());
    SearchPagedResponse secondPage = iterator.next();
    assertEquals(1000, secondPage.getValue().size());
    assertListEqualHotelIds(expectedHotelIds.subList(1000, 2000), secondPage.getValue());
    assertNull(secondPage.getContinuationToken());
}
// Uploads hotels and verifies they round-trip when deserialized into the statically typed Hotel model.
@Test
public void canSearchStaticallyTypedDocuments() {
    client = setupClient(this::createHotelIndex);
    List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
    SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    // No count/coverage/facets were requested, so none should be present.
    assertNull(results.getTotalCount());
    assertNull(results.getCoverage());
    assertNull(results.getFacets());
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    List<Hotel> actualResults = new ArrayList<>(hotels.size());
    while (iterator.hasNext()) {
        SearchPagedResponse result = iterator.next();
        assertNotNull(result.getValue());
        result.getElements().forEach(item -> {
            assertEquals(1, item.getScore(), 0);
            assertNull(item.getHighlights());
            Hotel hotel = item.getDocument(Hotel.class);
            actualResults.add(hotel);
        });
    }
    List<Hotel> hotelsList = hotels.stream()
        .map(hotel -> convertMapToValue(hotel, Hotel.class))
        .collect(Collectors.toList());
    assertEquals(hotelsList.size(), actualResults.size());
    // Sort numerically by hotel id so the element-wise comparison is order-independent.
    actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.hotelId())));
    for (int i = 0; i < hotelsList.size(); i++) {
        assertObjectEquals(hotelsList.get(i), actualResults.get(i), true, "properties");
    }
}
/**
 * Verifies that a date stored in the service round-trips into the legacy {@link Date} field of the static
 * {@code Hotel} model. Fix: the {@code @SuppressWarnings("UseOfObsoleteDateTimeApi")} and {@code @Test}
 * annotations were each duplicated; neither annotation is repeatable, so the duplicates were a compile error
 * and have been removed.
 */
@SuppressWarnings("UseOfObsoleteDateTimeApi")
@Test
public void canSearchWithDateInStaticModel() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    // Expected last renovation date of the single hotel matching "Fancy".
    OffsetDateTime expected = OffsetDateTime.parse("2010-06-27T00:00:00Z");
    SearchPagedIterable results = client.search("Fancy", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    assertTrue(iterator.hasNext());
    SearchPagedResponse result = iterator.next();
    assertEquals(1, result.getValue().size());
    Date actual = result.getValue().get(0).getDocument(Hotel.class).lastRenovationDate();
    // Compare via epoch millis since the model exposes the obsolete java.util.Date type.
    long epochMilli = expected.toInstant().toEpochMilli();
    assertEquals(new Date(epochMilli), actual);
}
// Verifies $select projection: unselected fields come back null/empty while selected (including nested) ones match.
@Test
public void canSearchWithSelectedFields() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions sp = new SearchOptions();
    sp.setSearchFields("HotelName", "Category");
    sp.setSelect("HotelName", "Rating", "Address/City", "Rooms/Type");
    SearchPagedIterable results = client.search("fancy luxury secret", sp, Context.NONE);
    // "Fancy Stay" has no Address or Rooms data, so the projection yields null/empty for those fields.
    HashMap<String, Object> expectedHotel1 = new HashMap<>();
    expectedHotel1.put("HotelName", "Fancy Stay");
    expectedHotel1.put("Rating", 5);
    expectedHotel1.put("Address", null);
    expectedHotel1.put("Rooms", Collections.emptyList());
    HashMap<String, Object> expectedHotel2 = new HashMap<>();
    expectedHotel2.put("HotelName", "Secret Point Motel");
    expectedHotel2.put("Rating", 4);
    HashMap<String, Object> address = new HashMap<>();
    address.put("City", "New York");
    expectedHotel2.put("Address", address);
    HashMap<String, Object> rooms = new HashMap<>();
    rooms.put("Type", "Budget Room");
    HashMap<String, Object> rooms2 = new HashMap<>();
    rooms2.put("Type", "Budget Room");
    expectedHotel2.put("Rooms", Arrays.asList(rooms, rooms2));
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    SearchPagedResponse result = iterator.next();
    assertEquals(2, result.getValue().size());
    Map<String, Object> hotel1 = extractAndTransformSingleResult(result.getValue().get(0));
    Map<String, Object> hotel2 = extractAndTransformSingleResult(result.getValue().get(1));
    assertMapEquals(expectedHotel1, hotel1, true);
    assertMapEquals(expectedHotel2, hotel2, true);
}
// Verifies client-side paging through $top/$skip with an explicit HotelId order.
@Test
public void canUseTopAndSkipForClientSidePaging() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions parameters = new SearchOptions().setTop(3).setSkip(0).setOrderBy("HotelId");
    SearchPagedIterable results = client.search("*", parameters, Context.NONE);
    // HotelId sorts lexicographically, so "10" lands between "1" and "2".
    assertKeySequenceEqual(results, Arrays.asList("1", "10", "2"));
    parameters.setSkip(3);
    results = client.search("*", parameters, Context.NONE);
    assertKeySequenceEqual(results, Arrays.asList("3", "4", "5"));
}
// With no $orderby, checks the relative score ordering of the first two filtered results.
@Test
public void searchWithoutOrderBySortsByScore() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    Iterator<SearchResult> results = client
        .search("*", new SearchOptions().setFilter("Rating lt 4"), Context.NONE).iterator();
    SearchResult firstResult = results.next();
    SearchResult secondResult = results.next();
    assertTrue(firstResult.getScore() <= secondResult.getScore());
}
/**
 * Verifies that multiple {@code $orderby} clauses break ties progressively: Rating descending first, then
 * LastRenovationDate ascending, then HotelId. Fix: {@code assertArrayEquals} takes {@code (expected, actual)} —
 * the arguments were swapped, which produced inverted failure messages; they are now in the documented order.
 */
@Test
public void orderByProgressivelyBreaksTies() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    String[] expectedResults = new String[]{"1", "9", "3", "4", "5", "10", "2", "6", "7", "8"};
    Stream<String> results = client
        .search("*", new SearchOptions().setOrderBy("Rating desc", "LastRenovationDate asc", "HotelId"),
            Context.NONE).stream()
        .map(this::getSearchResultId);
    assertArrayEquals(expectedResults, results.toArray());
}
// Verifies an OData filter combining a numeric comparison with a DateTimeOffset
// literal; only hotels 1 and 5 satisfy both predicates.
@Test
public void canFilter() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setFilter("Rating gt 3 and LastRenovationDate gt 2000-01-01T00:00:00Z")
.setOrderBy("HotelId asc");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(2, searchResultsList.size());
assertEquals("1", searchResultsList.get(0).get("HotelId").toString());
assertEquals("5", searchResultsList.get(1).get("HotelId").toString());
}
// Verifies range (interval) faceting on a numeric sub-field and a date field, and
// that every returned document corresponds to a known hotel from the test data.
@Test
public void canSearchWithRangeFacets() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", getSearchOptionsForRangeFacets(),
Context.NONE);
assertNotNull(results.getFacets());
List<RangeFacetResult<String>> baseRateFacets = getRangeFacetsForField(results.getFacets(),
"Rooms/BaseRate", 4);
List<RangeFacetResult<String>> lastRenovationDateFacets = getRangeFacetsForField(
results.getFacets(), "LastRenovationDate", 2);
assertRangeFacets(baseRateFacets, lastRenovationDateFacets);
for (SearchPagedResponse result : results.iterableByPage()) {
assertContainHotelIds(hotels, result.getValue());
}
}
// Verifies value (discrete) faceting across several field types: integer, boolean,
// string, date, double, and a string collection — checking exact counts per value.
@Test
public void canSearchWithValueFacets() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", getSearchOptionsForValueFacets(),
Context.NONE);
Map<String, List<FacetResult>> facets = results.getFacets();
assertNotNull(facets);
for (SearchPagedResponse result : results.iterableByPage()) {
assertContainHotelIds(hotels, result.getValue());
// Rating was requested with count:2,sort:-value, so only the top two values appear.
assertValueFacetsEqual(
getValueFacetsForField(facets, "Rating", 2),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, 5),
new ValueFacetResult<>(4L, 4))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "SmokingAllowed", 2),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(4L, false),
new ValueFacetResult<>(2L, true))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Category", 3),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(5L, "Budget"),
new ValueFacetResult<>(1L, "Boutique"),
new ValueFacetResult<>(1L, "Luxury"))));
// LastRenovationDate was requested with interval:year; values come back as
// ISO-8601 offset strings, so the expected values are formatted the same way.
assertValueFacetsEqual(getValueFacetsForField(facets, "LastRenovationDate", 6),
new ArrayList<>(Arrays.asList(new ValueFacetResult<>(1L, OffsetDateTime.parse("1970-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("1982-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(2L, OffsetDateTime.parse("1995-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("1999-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("2010-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("2012-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Rooms/BaseRate", 4),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, 2.44),
new ValueFacetResult<>(1L, 7.69),
new ValueFacetResult<>(1L, 8.09),
new ValueFacetResult<>(1L, 9.69))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Tags", 10),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, "24-hour front desk service"),
new ValueFacetResult<>(1L, "air conditioning"),
new ValueFacetResult<>(4L, "budget"),
new ValueFacetResult<>(1L, "coffee in lobby"),
new ValueFacetResult<>(2L, "concierge"),
new ValueFacetResult<>(1L, "motel"),
new ValueFacetResult<>(2L, "pool"),
new ValueFacetResult<>(1L, "restaurant"),
new ValueFacetResult<>(1L, "view"),
new ValueFacetResult<>(4L, "wifi"))));
}
}
// Verifies full Lucene query syntax: a fuzzy term ("roch~") should match the
// single hotel named "Roach Motel".
@Test
public void canSearchWithLuceneSyntax() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
Map<String, Object> expectedResult = new HashMap<>();
expectedResult.put("HotelName", "Roach Motel");
expectedResult.put("Rating", 1);
SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL).setSelect("HotelName", "Rating");
SearchPagedIterable results = client.search("HotelName:roch~", searchOptions,
Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(1, searchResultsList.size());
assertEquals(expectedResult, searchResultsList.get(0));
}
// Verifies filtering on non-nullable value-type fields, including a predicate on a
// nested complex field; document "789" fails the Bucket/Count condition and is excluded.
@Test
public void canFilterNonNullableType() {
client = setupClient(this::createIndexWithValueTypes);
List<Map<String, Object>> docsList = createDocsListWithValueTypes();
uploadDocuments(client, docsList);
List<Map<String, Object>> expectedDocsList =
docsList.stream()
.filter(d -> !d.get("Key").equals("789"))
.collect(Collectors.toList());
SearchOptions searchOptions = new SearchOptions()
.setFilter("IntValue eq 0 or (Bucket/BucketName eq 'B' and Bucket/Count lt 10)");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(2, searchResultsList.size());
assertTrue(searchResultsList.containsAll(expectedDocsList));
}
// Verifies SearchMode.ALL requires every term to match: only hotel "2" contains
// both "Cheapest" and "hotel".
@Test
public void canSearchWithSearchModeAll() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> response = getSearchResults(client
.search("Cheapest hotel", new SearchOptions().setQueryType(QueryType.SIMPLE).setSearchMode(SearchMode.ALL),
Context.NONE));
assertEquals(1, response.size());
assertEquals("2", response.get(0).get("HotelId"));
}
// Verifies the default search mode is ANY: the same query matches every document
// containing either term (seven hotels).
@Test
public void defaultSearchModeIsAny() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> response = getSearchResults(client.search("Cheapest hotel",
new SearchOptions().setOrderBy("HotelId"), Context.NONE));
assertEquals(7, response.size());
assertEquals(
Arrays.asList("1", "10", "2", "3", "4", "5", "9"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies setIncludeTotalCount(true) populates the total count, and that all
// documents fit in a single page for this small data set.
@Test
public void canGetResultCountInSearch() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", new SearchOptions().setIncludeTotalCount(true),
Context.NONE);
assertNotNull(results);
assertEquals(hotels.size(), results.getTotalCount().intValue());
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertNotNull(iterator.next());
assertFalse(iterator.hasNext());
}
// Verifies Lucene regular-expression queries: /.*oach.*\/?/ against HotelName
// should match only "Roach Motel".
@Test
public void canSearchWithRegex() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("HotelName:/.*oach.*\\/?/", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> resultsList = getSearchResults(results);
    Map<String, Object> expectedHotel = new HashMap<>();
    expectedHotel.put("HotelName", "Roach Motel");
    expectedHotel.put("Rating", 1);
    assertEquals(1, resultsList.size());
    // FIX: assertEquals takes (expected, actual); the arguments were swapped,
    // which produces a misleading failure message.
    assertEquals(expectedHotel, resultsList.get(0));
}
// Verifies that a query consisting solely of escaped Lucene special characters is
// accepted by the service (no parse error) and simply matches nothing.
@Test
public void canSearchWithEscapedSpecialCharsInRegex() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL);
SearchPagedIterable results = client.search("\\+\\-\\&\\|\\!\\(\\)\\{\\}\\[\\]\\^\\~\\*\\?\\:", searchOptions,
Context.NONE);
assertNotNull(results);
List<Map<String, Object>> resultsList = getSearchResults(results);
assertEquals(0, resultsList.size());
}
// Verifies that a distance-based scoring profile ("nearest" with a geo parameter)
// reorders results: hotel "2" outranks hotel "1" for the given location.
@Test
public void searchWithScoringProfileBoostsScore() {
client = setupClient(this::createHotelIndex);
uploadDocumentsJson(client, HOTELS_DATA_JSON);
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("nearest")
.setScoringParameters(new ScoringParameter("myloc", new GeoPoint(-122.0, 49.0)))
.setFilter("Rating eq 5 or Rating eq 1")
.setOrderBy("HotelId desc");
List<Map<String, Object>> response = getSearchResults(client.search("hotel", searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals("2", response.get(0).get("HotelId").toString());
assertEquals("1", response.get(1).get("HotelId").toString());
}
// Verifies scoring-parameter values containing commas and quotes are escaped
// correctly when sent to the service.
@Test
public void searchWithScoringProfileEscaper() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("text")
.setScoringParameters(new ScoringParameter("mytag", Arrays.asList("concierge", "Hello, O''Brien")))
.setFilter("Rating eq 5 or Rating eq 1");
List<Map<String, Object>> response = getSearchResults(client.search("hotel",
searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals(
Arrays.asList("1", "2"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies an empty string among the scoring-parameter values is tolerated and
// does not change the expected results.
@Test
public void searchWithScoringParametersEmpty() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("text")
.setScoringParameters(new ScoringParameter("mytag", Arrays.asList("", "concierge")))
.setFilter("Rating eq 5 or Rating eq 1");
List<Map<String, Object>> response = getSearchResults(client.search("hotel",
searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals(
Arrays.asList("1", "2"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies that requesting a minimum index coverage surfaces the coverage value in
// the response (a healthy single-shard index reports 100%).
@Test
public void canSearchWithMinimumCoverage() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchPagedIterable results = client.search("*", new SearchOptions().setMinimumCoverage(50.0),
Context.NONE);
assertNotNull(results);
assertEquals(100.0, results.getCoverage(), 0);
}
// Verifies hit highlighting: matched terms in the requested fields come back
// wrapped in the configured pre/post tags, and only those fields are highlighted.
@Test
public void canUseHitHighlighting() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
String description = "Description";
String category = "Category";
SearchOptions sp = new SearchOptions();
sp.setFilter("Rating eq 5");
sp.setHighlightPreTag("<b>");
sp.setHighlightPostTag("</b>");
sp.setHighlightFields(category, description);
SearchPagedIterable results = client.search("luxury hotel", sp, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse result = iterator.next();
List<SearchResult> documents = result.getValue();
assertEquals(1, documents.size());
Map<String, List<String>> highlights = documents.get(0).getHighlights();
assertNotNull(highlights);
assertEquals(2, highlights.keySet().size());
assertTrue(highlights.containsKey(description));
assertTrue(highlights.containsKey(category));
String categoryHighlight = highlights.get(category).get(0);
assertEquals("<b>Luxury</b>", categoryHighlight);
List<String> expectedDescriptionHighlights =
Arrays.asList(
"Best <b>hotel</b> in town if you like <b>luxury</b> <b>hotels</b>.",
"We highly recommend this <b>hotel</b>."
);
assertEquals(expectedDescriptionHighlights, highlights.get(description));
}
// Verifies synonym maps: after attaching a "luxury,fancy" synonym map to the
// HotelName field, searching for "luxury" matches the hotel named "Fancy Stay".
@Test
public void canSearchWithSynonyms() {
    client = setupClient(this::createHotelIndex);
    uploadDocumentsJson(client, HOTELS_DATA_JSON);
    String fieldName = "HotelName";
    SearchIndexClient searchIndexClient = getSearchIndexClientBuilder().buildClient();
    // Track the synonym map so afterTest() can delete it.
    synonymMapToDelete = searchIndexClient.createSynonymMap(new SynonymMap(
        testResourceNamer.randomName("names", 32))
        .setSynonyms("luxury,fancy")).getName();
    SearchIndex hotelsIndex = searchIndexClient.getIndex(client.getIndexName());
    // FIX: replaced unchecked Optional.get() with orElseThrow so a missing field
    // fails with a clear message instead of a bare NoSuchElementException.
    hotelsIndex.getFields().stream()
        .filter(f -> fieldName.equals(f.getName()))
        .findFirst()
        .orElseThrow(() -> new IllegalStateException(
            "Field '" + fieldName + "' not found in index " + client.getIndexName()))
        .setSynonymMapNames(synonymMapToDelete);
    searchIndexClient.createOrUpdateIndex(hotelsIndex);
    // Give the service time to apply the synonym map before querying.
    sleepIfRunningAgainstService(10000);
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSearchFields(fieldName)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("luxury", searchOptions, Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> response = getSearchResults(results);
    assertEquals(1, response.size());
    assertEquals("Fancy Stay", response.get(0).get("HotelName"));
    assertEquals(5, response.get(0).get("Rating"));
}
// Flattens every page of a paged search response into a single list of raw
// documents, asserting each page carries a non-null value list on the way.
private List<Map<String, Object>> getSearchResults(SearchPagedIterable results) {
    List<Map<String, Object>> documents = new ArrayList<>();
    for (SearchPagedResponse page : results.iterableByPage()) {
        assertNotNull(page.getValue());
        for (SearchResult hit : page.getElements()) {
            documents.add(hit.getDocument(SearchDocument.class));
        }
    }
    return documents;
}
// Extracts the raw document from a single search hit and normalizes any nested
// HashMap/ArrayList values into plain Map/List structures.
private Map<String, Object> extractAndTransformSingleResult(SearchResult result) {
    SearchDocument document = result.getDocument(SearchDocument.class);
    return convertHashMapToMap(document);
}
/**
 * Recursively converts a {@code HashMap} — including any nested {@code HashMap}
 * or {@code ArrayList} values — into plain {@link Map}/{@link java.util.List}
 * structures so test expectations can compare them directly.
 *
 * @param mapObject object to convert; expected to be a {@code HashMap<String, Object>}
 * @return {@link Map}{@code <}{@link String}{@code ,}{@link Object}{@code >}
 */
@SuppressWarnings("unchecked")
private static Map<String, Object> convertHashMapToMap(Object mapObject) {
    HashMap<String, Object> source = (HashMap<String, Object>) mapObject;
    Map<String, Object> converted = new HashMap<>();
    for (Map.Entry<String, Object> entry : source.entrySet()) {
        Object normalized = entry.getValue();
        if (normalized instanceof HashMap) {
            normalized = convertHashMapToMap(normalized);
        }
        if (normalized instanceof ArrayList) {
            normalized = convertArray((ArrayList<Object>) normalized);
        }
        converted.put(entry.getKey(), normalized);
    }
    return converted;
}
/**
 * Normalizes each element of an array: nested {@code HashMap} elements are
 * converted via {@code convertHashMapToMap}; everything else is copied as-is.
 *
 * @param array which elements will be converted
 * @return {@link ArrayList}{@code <}{@link Object}{@code >}
 */
private static ArrayList<Object> convertArray(ArrayList<Object> array) {
    ArrayList<Object> converted = new ArrayList<>(array.size());
    for (Object element : array) {
        converted.add(element instanceof HashMap ? convertHashMapToMap(element) : element);
    }
    return converted;
}
// Asserts that the HotelId keys of the results, in iteration order, exactly match
// the expected key sequence; documents without a HotelId are skipped.
private void assertKeySequenceEqual(SearchPagedIterable results, List<String> expectedKeys) {
    assertNotNull(results);
    List<String> actualKeys = results.stream()
        .map(doc -> doc.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toList());
    assertEquals(expectedKeys, actualKeys);
}
// Builds {@code count} synthetic hotel documents (HotelId "1".."count") with
// deterministic per-index field values, suitable for paging tests.
List<Map<String, Object>> createHotelsList(int count) {
    List<Map<String, Object>> hotels = new ArrayList<>(count);
    for (int id = 1; id <= count; id++) {
        Map<String, Object> hotel = new HashMap<>();
        hotel.put("HotelId", String.valueOf(id));
        hotel.put("HotelName", "Hotel" + id);
        hotel.put("Description", "Desc" + id);
        hotel.put("Description_fr", "Desc_fr" + id);
        hotel.put("Category", "Catg" + id);
        hotel.put("Tags", Collections.singletonList("tag" + id));
        hotel.put("ParkingIncluded", false);
        hotel.put("SmokingAllowed", false);
        // Fixed date keeps the documents deterministic across runs.
        hotel.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T00:00:00Z"));
        hotel.put("Rating", id);
        hotels.add(hotel);
    }
    return hotels;
}
// Pairwise-compares search results against the expected hotels.
// FIX: previously the loop stopped at the shorter iterator and the method always
// returned true, so a length mismatch passed silently; now the sizes are asserted
// equal up front. The boolean return is kept for caller compatibility.
boolean compareResults(List<Map<String, Object>> searchResults, List<Map<String, Object>> hotels) {
    assertEquals(hotels.size(), searchResults.size());
    Iterator<Map<String, Object>> searchIterator = searchResults.iterator();
    Iterator<Map<String, Object>> hotelsIterator = hotels.iterator();
    while (searchIterator.hasNext() && hotelsIterator.hasNext()) {
        Map<String, Object> result = searchIterator.next();
        Map<String, Object> hotel = hotelsIterator.next();
        assertMapEquals(hotel, result, true, "properties");
    }
    return true;
}
// Asserts the exact shape of the range facets produced by getSearchOptionsForRangeFacets():
// four Rooms/BaseRate buckets (open-ended at both extremes) and two
// LastRenovationDate buckets split at 2000-01-01, with their expected counts.
<T> void assertRangeFacets(List<RangeFacetResult<T>> baseRateFacets, List<RangeFacetResult<T>> lastRenovationDateFacets) {
// First and last buckets are open-ended (null from / null to).
assertNull(baseRateFacets.get(0).getFrom());
assertEquals(5.0, baseRateFacets.get(0).getTo());
assertEquals(5.0, baseRateFacets.get(1).getFrom());
assertEquals(8.0, baseRateFacets.get(1).getTo());
assertEquals(8.0, baseRateFacets.get(2).getFrom());
assertEquals(10.0, baseRateFacets.get(2).getTo());
assertEquals(10.0, baseRateFacets.get(3).getFrom());
assertNull(baseRateFacets.get(3).getTo());
assertEquals(1, baseRateFacets.get(0).getCount().intValue());
assertEquals(1, baseRateFacets.get(1).getCount().intValue());
assertEquals(1, baseRateFacets.get(2).getCount().intValue());
assertEquals(0, baseRateFacets.get(3).getCount().intValue());
assertNull(lastRenovationDateFacets.get(0).getFrom());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(0).getTo());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(1).getFrom());
assertNull(lastRenovationDateFacets.get(1).getTo());
assertEquals(5, lastRenovationDateFacets.get(0).getCount().intValue());
assertEquals(2, lastRenovationDateFacets.get(1).getCount().intValue());
}
// Looks up the facets for a field and wraps each one as a RangeFacetResult.
<T> List<RangeFacetResult<T>> getRangeFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    List<RangeFacetResult<T>> converted = new ArrayList<>();
    for (FacetResult facet : getFacetsForField(facets, expectedField, expectedCount)) {
        converted.add(new RangeFacetResult<>(facet));
    }
    return converted;
}
// Looks up the facets for a field and wraps each one as a ValueFacetResult.
<T> List<ValueFacetResult<T>> getValueFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    List<ValueFacetResult<T>> converted = new ArrayList<>();
    for (FacetResult facet : getFacetsForField(facets, expectedField, expectedCount)) {
        converted.add(new ValueFacetResult<>(facet));
    }
    return converted;
}
// Asserts the facet map contains the field with exactly the expected number of
// buckets, then returns those buckets.
private List<FacetResult> getFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    assertTrue(facets.containsKey(expectedField));
    List<FacetResult> fieldFacets = facets.get(expectedField);
    assertEquals(expectedCount, fieldFacets.size());
    return fieldFacets;
}
// Asserts that the set of HotelId keys in the actual results equals the set of
// HotelId keys in the expected documents (order-insensitive).
void assertContainHotelIds(List<Map<String, Object>> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    Set<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toSet());
    Set<String> expectedKeys = expected.stream()
        .filter(item -> item.containsKey("HotelId"))
        .map(item -> (String) item.get("HotelId"))
        .collect(Collectors.toSet());
    assertEquals(expectedKeys, actualKeys);
}
// Asserts two facet lists are equal element-by-element on count and value.
<T> void assertValueFacetsEqual(List<ValueFacetResult<T>> actualFacets, ArrayList<ValueFacetResult<T>> expectedFacets) {
    assertEquals(expectedFacets.size(), actualFacets.size());
    int index = 0;
    for (ValueFacetResult<T> actualFacet : actualFacets) {
        ValueFacetResult<T> expectedFacet = expectedFacets.get(index++);
        assertEquals(expectedFacet.getCount(), actualFacet.getCount());
        assertEquals(expectedFacet.getValue(), actualFacet.getValue());
    }
}
// Returns the HotelId of a search hit as a string (throws NPE if absent, same as
// the surrounding tests expect for well-formed documents).
String getSearchResultId(SearchResult searchResult) {
    Object hotelId = searchResult.getDocument(SearchDocument.class).get("HotelId");
    return hotelId.toString();
}
// Range facets: BaseRate split at 5|8|10 and LastRenovationDate split at 2000-01-01.
SearchOptions getSearchOptionsForRangeFacets() {
return new SearchOptions().setFacets("Rooms/BaseRate,values:5|8|10",
"LastRenovationDate,values:2000-01-01T00:00:00Z");
}
// Value facets across several field types, exercising count limits, sort order,
// and yearly date intervals; expectations live in canSearchWithValueFacets.
SearchOptions getSearchOptionsForValueFacets() {
return new SearchOptions().setFacets("Rating,count:2,sort:-value",
"SmokingAllowed,sort:count",
"Category",
"LastRenovationDate,interval:year",
"Rooms/BaseRate,sort:value",
"Tags,sort:value");
}
// Asserts the HotelId keys of the actual results, in order, equal the expected
// list; documents without a HotelId are skipped.
void assertListEqualHotelIds(List<String> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    List<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toList());
    assertEquals(expected, actualKeys);
}
// Creates an index covering every non-nullable primitive field type plus a complex
// field and a complex collection; returns the generated index name.
String createIndexWithNonNullableTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("non-nullable-index", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setHidden(false)
.setKey(true),
new SearchField("Rating", SearchFieldDataType.INT32)
.setHidden(false),
new SearchField("Count", SearchFieldDataType.INT64)
.setHidden(false),
new SearchField("IsEnabled", SearchFieldDataType.BOOLEAN)
.setHidden(false),
new SearchField("Ratio", SearchFieldDataType.DOUBLE)
.setHidden(false),
new SearchField("StartDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
new SearchField("EndDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
new SearchField("TopLevelBucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true))),
new SearchField("Buckets", SearchFieldDataType.collection(SearchFieldDataType.COMPLEX))
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)))));
setupIndex(index);
return index.getName();
}
// Creates a small index (string key, filterable int, complex Bucket field) used by
// the value-type filter tests; returns the generated index name.
String createIndexWithValueTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("testindex", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setKey(true)
.setSearchable(true),
new SearchField("IntValue", SearchFieldDataType.INT32)
.setFilterable(true),
new SearchField("Bucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)
))
)
);
setupIndex(index);
return index.getName();
}
// Builds the three fixed documents used by the value-type filter tests; each has a
// Key, an IntValue, and a nested Bucket complex value.
List<Map<String, Object>> createDocsListWithValueTypes() {
    return Arrays.asList(
        valueTypeDoc("132", 0, "A", 3),
        valueTypeDoc("456", 7, "B", 5),
        valueTypeDoc("789", 1, "B", 99));
}
// Assembles one document with its nested Bucket map.
private static Map<String, Object> valueTypeDoc(String key, int intValue, String bucketName, int bucketCount) {
    Map<String, Object> bucket = new HashMap<>();
    bucket.put("BucketName", bucketName);
    bucket.put("Count", bucketCount);
    Map<String, Object> doc = new HashMap<>();
    doc.put("Key", key);
    doc.put("IntValue", intValue);
    doc.put("Bucket", bucket);
    return doc;
}
} | class SearchSyncTests extends SearchTestBase {
// Indexes created by individual tests; deleted in afterTest() to keep the service clean.
private final List<String> indexesToDelete = new ArrayList<>();
// Name of a synonym map created during a test; empty means nothing to clean up.
private String synonymMapToDelete = "";
// Shared index populated once per class and removed in cleanupClass().
private static final String INDEX_NAME = "azsearch-search-shared-instance";
private static SearchIndexClient searchIndexClient;
// Per-test search client; each test assigns the client it needs.
private SearchClient client;
// One-time setup: creates the shared index unless running in playback mode, where
// recorded responses are used instead of the live service.
@BeforeAll
public static void setupClass() {
TestBase.setupClass();
if (TEST_MODE == TestMode.PLAYBACK) {
return;
}
searchIndexClient = setupSharedIndex(INDEX_NAME);
}
// Per-test teardown: deletes any indexes and synonym map the test created.
@Override
protected void afterTest() {
super.afterTest();
SearchIndexClient serviceClient = getSearchIndexClientBuilder().buildClient();
for (String index : indexesToDelete) {
serviceClient.deleteIndex(index);
}
if (!CoreUtils.isNullOrEmpty(synonymMapToDelete)) {
serviceClient.deleteSynonymMap(synonymMapToDelete);
// Give the service time to process the deletion before the next test.
sleepIfRunningAgainstService(5000);
}
}
// One-time teardown: removes the shared index when running against the live service.
@AfterAll
protected static void cleanupClass() {
if (TEST_MODE != TestMode.PLAYBACK) {
searchIndexClient.deleteIndex(INDEX_NAME);
}
}
// Creates an index via the supplier, registers it for deletion in afterTest(),
// and returns a SearchClient bound to it.
private SearchClient setupClient(Supplier<String> indexSupplier) {
String indexName = indexSupplier.get();
indexesToDelete.add(indexName);
return getSearchClientBuilder(indexName).buildClient();
}
// Verifies the service rejects an unparsable OData filter with HTTP 400 and a
// descriptive error message.
@Test
public void searchThrowsWhenRequestIsMalformed() {
SearchOptions invalidSearchOptions = new SearchOptions().setFilter("This is not a valid filter.");
assertHttpResponseException(
() -> search("*", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Invalid expression: Syntax error at position 7 in 'This is not a valid filter.'");
}
// Verifies that unescaped Lucene regex delimiters in a FULL query produce HTTP 400.
@Test
public void searchThrowsWhenSpecialCharInRegexIsUnescaped() {
SearchOptions invalidSearchOptions = new SearchOptions().setQueryType(QueryType.FULL);
assertHttpResponseException(
() -> search("/.*/.*/", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Failed to parse query string at line 1, column 8.");
}
// Executes a search and forces the first page to materialize so service-side
// errors are actually raised (the paged iterable is lazy).
private void search(String searchText, SearchOptions searchOptions) {
getSearchClientBuilder(INDEX_NAME).buildClient().search(searchText, searchOptions, Context.NONE)
.iterableByPage()
.iterator()
.next();
}
// Verifies searching with dynamically-typed (SearchDocument) results: all uploaded
// hotels come back with score 1 for "*", no highlights, and matching content.
@Test
public void canSearchDynamicDocuments() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
SearchPagedIterable searchResults = client.search("*");
assertNotNull(searchResults);
// Count/coverage/facets were not requested, so they must be absent.
assertNull(searchResults.getTotalCount());
assertNull(searchResults.getCoverage());
assertNull(searchResults.getFacets());
Iterator<SearchPagedResponse> iterator = searchResults.iterableByPage().iterator();
List<Map<String, Object>> actualResults = new ArrayList<>(hotels.size());
while (iterator.hasNext()) {
SearchPagedResponse result = iterator.next();
assertNotNull(result.getValue());
result.getElements().forEach(item -> {
assertEquals(1, item.getScore(), 0);
assertNull(item.getHighlights());
actualResults.add(item.getDocument(SearchDocument.class));
});
}
assertEquals(hotels.size(), actualResults.size());
// Sort numerically by HotelId so the comparison is order-independent.
actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.get("HotelId").toString())));
assertTrue(compareResults(actualResults, hotels));
}
// Verifies server-side continuation: 100 documents page at the default size of 50,
// with a continuation token on the first page and none on the last.
@Test
public void canContinueSearch() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = createHotelsList(100);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions().setSelect("HotelId")
.setOrderBy("HotelId asc");
// HotelIds are strings, so the expected order is lexicographic.
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(50, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 50), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(50, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(50, 100), secondPage.getValue());
assertNull(secondPage.getContinuationToken());
}
// Verifies continuation with an explicit top: requesting 2000 of 3000 documents
// pages at the service maximum of 1000 per page.
@Test
public void canContinueSearchWithTop() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = createHotelsList(3000);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions()
.setTop(2000)
.setSelect("HotelId")
.setOrderBy("HotelId asc");
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(1000, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 1000), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(1000, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(1000, 2000), secondPage.getValue());
assertNull(secondPage.getContinuationToken());
}
// Verifies deserialization of results into the strongly-typed Hotel model and that
// every uploaded hotel round-trips with equal properties.
@Test
public void canSearchStaticallyTypedDocuments() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
// Count/coverage/facets were not requested, so they must be absent.
assertNull(results.getTotalCount());
assertNull(results.getCoverage());
assertNull(results.getFacets());
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
List<Hotel> actualResults = new ArrayList<>(hotels.size());
while (iterator.hasNext()) {
SearchPagedResponse result = iterator.next();
assertNotNull(result.getValue());
result.getElements().forEach(item -> {
assertEquals(1, item.getScore(), 0);
assertNull(item.getHighlights());
Hotel hotel = item.getDocument(Hotel.class);
actualResults.add(hotel);
});
}
List<Hotel> hotelsList = hotels.stream()
.map(hotel -> convertMapToValue(hotel, Hotel.class))
.collect(Collectors.toList());
assertEquals(hotelsList.size(), actualResults.size());
// Sort numerically by HotelId so the comparison is order-independent.
actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.hotelId())));
for (int i = 0; i < hotelsList.size(); i++) {
assertObjectEquals(hotelsList.get(i), actualResults.get(i), true, "properties");
}
}
// Verifies that a DateTimeOffset field deserializes into the model's legacy
// java.util.Date property with the correct instant.
// FIX: @SuppressWarnings and @Test were each duplicated above this method; neither
// annotation is @Repeatable, so the duplicate pair does not compile. Keep one of each.
@SuppressWarnings("UseOfObsoleteDateTimeApi")
@Test
public void canSearchWithDateInStaticModel() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    OffsetDateTime expected = OffsetDateTime.parse("2010-06-27T00:00:00Z");
    SearchPagedIterable results = client.search("Fancy", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    assertTrue(iterator.hasNext());
    SearchPagedResponse result = iterator.next();
    assertEquals(1, result.getValue().size());
    Date actual = result.getValue().get(0).getDocument(Hotel.class).lastRenovationDate();
    // Compare via epoch millis since Date has no offset awareness.
    long epochMilli = expected.toInstant().toEpochMilli();
    assertEquals(new Date(epochMilli), actual);
}
// Verifies field projection via $select, including sub-field selection on a complex
// field (Address/City) and a complex collection (Rooms/Type); unselected complex
// fields come back as null / empty list.
@Test
public void canSearchWithSelectedFields() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions sp = new SearchOptions();
sp.setSearchFields("HotelName", "Category");
sp.setSelect("HotelName", "Rating", "Address/City", "Rooms/Type");
SearchPagedIterable results = client.search("fancy luxury secret", sp, Context.NONE);
// "Fancy Stay" has no Address and no Rooms, so those project to null/empty.
HashMap<String, Object> expectedHotel1 = new HashMap<>();
expectedHotel1.put("HotelName", "Fancy Stay");
expectedHotel1.put("Rating", 5);
expectedHotel1.put("Address", null);
expectedHotel1.put("Rooms", Collections.emptyList());
HashMap<String, Object> expectedHotel2 = new HashMap<>();
expectedHotel2.put("HotelName", "Secret Point Motel");
expectedHotel2.put("Rating", 4);
HashMap<String, Object> address = new HashMap<>();
address.put("City", "New York");
expectedHotel2.put("Address", address);
HashMap<String, Object> rooms = new HashMap<>();
rooms.put("Type", "Budget Room");
HashMap<String, Object> rooms2 = new HashMap<>();
rooms2.put("Type", "Budget Room");
expectedHotel2.put("Rooms", Arrays.asList(rooms, rooms2));
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
Map<String, Object> hotel1 = extractAndTransformSingleResult(result.getValue().get(0));
Map<String, Object> hotel2 = extractAndTransformSingleResult(result.getValue().get(1));
assertMapEquals(expectedHotel1, hotel1, true);
assertMapEquals(expectedHotel2, hotel2, true);
}
@Test
public void canUseTopAndSkipForClientSidePaging() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions parameters = new SearchOptions().setTop(3).setSkip(0).setOrderBy("HotelId");
SearchPagedIterable results = client.search("*", parameters, Context.NONE);
assertKeySequenceEqual(results, Arrays.asList("1", "10", "2"));
parameters.setSkip(3);
results = client.search("*", parameters, Context.NONE);
assertKeySequenceEqual(results, Arrays.asList("3", "4", "5"));
}
/** Without an explicit $orderby, results are ordered by relevance score. */
@Test
public void searchWithoutOrderBySortsByScore() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    Iterator<SearchResult> results = client
        .search("*", new SearchOptions().setFilter("Rating lt 4"), Context.NONE).iterator();
    // Compare the scores of the first two hits (same comparison as the original).
    double firstScore = results.next().getScore();
    double secondScore = results.next().getScore();
    assertTrue(firstScore <= secondScore);
}
/**
 * Verifies that multiple $orderby clauses break ties progressively
 * (Rating desc, then LastRenovationDate asc, then HotelId).
 */
@Test
public void orderByProgressivelyBreaksTies() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    String[] expectedResults = new String[]{"1", "9", "3", "4", "5", "10", "2", "6", "7", "8"};
    Stream<String> results = client
        .search("*", new SearchOptions().setOrderBy("Rating desc", "LastRenovationDate asc", "HotelId"),
            Context.NONE).stream()
        .map(this::getSearchResultId);
    // Fix: JUnit's assertArrayEquals takes (expected, actual); the original call had
    // the arguments reversed, which produces misleading failure messages.
    assertArrayEquals(expectedResults, results.toArray());
}
// Verifies a compound $filter (numeric comparison AND date comparison) with a stable order.
@Test
public void canFilter() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setFilter("Rating gt 3 and LastRenovationDate gt 2000-01-01T00:00:00Z")
.setOrderBy("HotelId asc");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
// Exactly hotels 1 and 5 satisfy both filter clauses in the shared test data.
assertEquals(2, searchResultsList.size());
assertEquals("1", searchResultsList.get(0).get("HotelId").toString());
assertEquals("5", searchResultsList.get(1).get("HotelId").toString());
}
// Verifies range facets (Rooms/BaseRate buckets and LastRenovationDate split) for a '*' search,
// and that every returned page only contains documents from the uploaded fixture.
@Test
public void canSearchWithRangeFacets() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", getSearchOptionsForRangeFacets(),
Context.NONE);
assertNotNull(results.getFacets());
// 3 boundary values (5|8|10) produce 4 BaseRate buckets; one date boundary produces 2.
List<RangeFacetResult<String>> baseRateFacets = getRangeFacetsForField(results.getFacets(),
"Rooms/BaseRate", 4);
List<RangeFacetResult<String>> lastRenovationDateFacets = getRangeFacetsForField(
results.getFacets(), "LastRenovationDate", 2);
assertRangeFacets(baseRateFacets, lastRenovationDateFacets);
for (SearchPagedResponse result : results.iterableByPage()) {
assertContainHotelIds(hotels, result.getValue());
}
}
/**
 * Verifies value facets (count/sort/interval variants) for a '*' search, and that every
 * page of results only contains known hotel ids.
 */
@Test
public void canSearchWithValueFacets() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
    SearchPagedIterable results = client.search("*", getSearchOptionsForValueFacets(),
        Context.NONE);
    Map<String, List<FacetResult>> facets = results.getFacets();
    assertNotNull(facets);
    // Per-page check: every document must come from the uploaded fixture.
    for (SearchPagedResponse result : results.iterableByPage()) {
        assertContainHotelIds(hotels, result.getValue());
    }
    // Facets describe the entire result set, not a single page, so assert them once
    // rather than re-running identical assertions inside the page loop (as before).
    assertValueFacetsEqual(
        getValueFacetsForField(facets, "Rating", 2),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(1L, 5),
            new ValueFacetResult<>(4L, 4))));
    assertValueFacetsEqual(
        getValueFacetsForField(facets, "SmokingAllowed", 2),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(4L, false),
            new ValueFacetResult<>(2L, true))));
    assertValueFacetsEqual(
        getValueFacetsForField(facets, "Category", 3),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(5L, "Budget"),
            new ValueFacetResult<>(1L, "Boutique"),
            new ValueFacetResult<>(1L, "Luxury"))));
    assertValueFacetsEqual(getValueFacetsForField(facets, "LastRenovationDate", 6),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(1L, OffsetDateTime.parse("1970-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
            new ValueFacetResult<>(1L, OffsetDateTime.parse("1982-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
            new ValueFacetResult<>(2L, OffsetDateTime.parse("1995-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
            new ValueFacetResult<>(1L, OffsetDateTime.parse("1999-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
            new ValueFacetResult<>(1L, OffsetDateTime.parse("2010-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
            new ValueFacetResult<>(1L, OffsetDateTime.parse("2012-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)))));
    assertValueFacetsEqual(
        getValueFacetsForField(facets, "Rooms/BaseRate", 4),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(1L, 2.44),
            new ValueFacetResult<>(1L, 7.69),
            new ValueFacetResult<>(1L, 8.09),
            new ValueFacetResult<>(1L, 9.69))));
    assertValueFacetsEqual(
        getValueFacetsForField(facets, "Tags", 10),
        new ArrayList<>(Arrays.asList(
            new ValueFacetResult<>(1L, "24-hour front desk service"),
            new ValueFacetResult<>(1L, "air conditioning"),
            new ValueFacetResult<>(4L, "budget"),
            new ValueFacetResult<>(1L, "coffee in lobby"),
            new ValueFacetResult<>(2L, "concierge"),
            new ValueFacetResult<>(1L, "motel"),
            new ValueFacetResult<>(2L, "pool"),
            new ValueFacetResult<>(1L, "restaurant"),
            new ValueFacetResult<>(1L, "view"),
            new ValueFacetResult<>(4L, "wifi"))));
}
/** Verifies full Lucene syntax: a fuzzy term ("roch~") matches "Roach Motel". */
@Test
public void canSearchWithLuceneSyntax() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    Map<String, Object> expectedResult = new HashMap<>();
    expectedResult.put("HotelName", "Roach Motel");
    expectedResult.put("Rating", 1);
    // QueryType.FULL enables the Lucene query parser (fuzzy operator '~').
    SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL).setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("HotelName:roch~", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> searchResultsList = getSearchResults(results);
    assertEquals(1, searchResultsList.size());
    assertEquals(expectedResult, searchResultsList.get(0));
}
// Verifies filtering on non-nullable value types, including a nested complex field.
// Document "789" is the only one excluded by the filter, so two results are expected.
@Test
public void canFilterNonNullableType() {
client = setupClient(this::createIndexWithValueTypes);
List<Map<String, Object>> docsList = createDocsListWithValueTypes();
uploadDocuments(client, docsList);
// Expected set: all uploaded docs except key "789".
List<Map<String, Object>> expectedDocsList =
docsList.stream()
.filter(d -> !d.get("Key").equals("789"))
.collect(Collectors.toList());
SearchOptions searchOptions = new SearchOptions()
.setFilter("IntValue eq 0 or (Bucket/BucketName eq 'B' and Bucket/Count lt 10)");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(2, searchResultsList.size());
assertTrue(searchResultsList.containsAll(expectedDocsList));
}
/** With SearchMode.ALL every term must match, narrowing the results to a single hotel. */
@Test
public void canSearchWithSearchModeAll() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions allTermsMustMatch = new SearchOptions()
        .setQueryType(QueryType.SIMPLE)
        .setSearchMode(SearchMode.ALL);
    List<Map<String, Object>> response =
        getSearchResults(client.search("Cheapest hotel", allTermsMustMatch, Context.NONE));
    assertEquals(1, response.size());
    assertEquals("2", response.get(0).get("HotelId"));
}
/** The default search mode matches ANY term, so "Cheapest hotel" hits seven documents. */
@Test
public void defaultSearchModeIsAny() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> response = getSearchResults(client.search("Cheapest hotel",
        new SearchOptions().setOrderBy("HotelId"), Context.NONE));
    assertEquals(7, response.size());
    List<String> actualIds = response.stream()
        .map(res -> res.get("HotelId").toString())
        .collect(Collectors.toList());
    assertEquals(Arrays.asList("1", "10", "2", "3", "4", "5", "9"), actualIds);
}
// Verifies setIncludeTotalCount: the reported total equals the fixture size and all
// documents fit on a single page.
@Test
public void canGetResultCountInSearch() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", new SearchOptions().setIncludeTotalCount(true),
Context.NONE);
assertNotNull(results);
assertEquals(hotels.size(), results.getTotalCount().intValue());
// Everything fits on one page: exactly one page, then exhaustion.
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertNotNull(iterator.next());
assertFalse(iterator.hasNext());
}
/**
 * Verifies Lucene regex queries: the pattern matches "Roach Motel" via its HotelName.
 */
@Test
public void canSearchWithRegex() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("HotelName:/.*oach.*\\/?/", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> resultsList = getSearchResults(results);
    Map<String, Object> expectedHotel = new HashMap<>();
    expectedHotel.put("HotelName", "Roach Motel");
    expectedHotel.put("Rating", 1);
    assertEquals(1, resultsList.size());
    // Fix: JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments reversed, producing misleading failure messages.
    assertEquals(expectedHotel, resultsList.get(0));
}
/** A query of nothing but escaped Lucene special characters parses cleanly and matches nothing. */
@Test
public void canSearchWithEscapedSpecialCharsInRegex() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL);
    // Every Lucene operator character, individually escaped.
    SearchPagedIterable results = client.search("\\+\\-\\&\\|\\!\\(\\)\\{\\}\\[\\]\\^\\~\\*\\?\\:", searchOptions,
        Context.NONE);
    assertNotNull(results);
    assertEquals(0, getSearchResults(results).size());
}
// Verifies that a geo-distance scoring profile ("nearest") reorders results: hotel 2
// outranks hotel 1 despite the descending-id tiebreak being the only explicit order.
@Test
public void searchWithScoringProfileBoostsScore() {
client = setupClient(this::createHotelIndex);
uploadDocumentsJson(client, HOTELS_DATA_JSON);
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("nearest")
.setScoringParameters(new ScoringParameter("myloc", new GeoPoint(-122.0, 49.0)))
.setFilter("Rating eq 5 or Rating eq 1")
.setOrderBy("HotelId desc");
List<Map<String, Object>> response = getSearchResults(client.search("hotel", searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals("2", response.get(0).get("HotelId").toString());
assertEquals("1", response.get(1).get("HotelId").toString());
}
/** Verifies scoring parameters containing commas and escaped quotes are transmitted intact. */
@Test
public void searchWithScoringProfileEscaper() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    // "Hello, O''Brien" exercises comma and single-quote escaping in the tag parameter.
    SearchOptions searchOptions = new SearchOptions()
        .setScoringProfile("text")
        .setScoringParameters(new ScoringParameter("mytag", Arrays.asList("concierge", "Hello, O''Brien")))
        .setFilter("Rating eq 5 or Rating eq 1");
    List<Map<String, Object>> response = getSearchResults(client.search("hotel",
        searchOptions, Context.NONE));
    assertEquals(2, response.size());
    List<String> ids = response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList());
    assertEquals(Arrays.asList("1", "2"), ids);
}
/** Verifies a scoring parameter list containing an empty string is accepted by the service. */
@Test
public void searchWithScoringParametersEmpty() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setScoringProfile("text")
        .setScoringParameters(new ScoringParameter("mytag", Arrays.asList("", "concierge")))
        .setFilter("Rating eq 5 or Rating eq 1");
    List<Map<String, Object>> response = getSearchResults(client.search("hotel",
        searchOptions, Context.NONE));
    assertEquals(2, response.size());
    List<String> ids = response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList());
    assertEquals(Arrays.asList("1", "2"), ids);
}
/** Verifies minimumCoverage is honored and full (100%) coverage is reported back. */
@Test
public void canSearchWithMinimumCoverage() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchPagedIterable results =
        client.search("*", new SearchOptions().setMinimumCoverage(50.0), Context.NONE);
    assertNotNull(results);
    assertEquals(100.0, results.getCoverage(), 0);
}
// Verifies hit highlighting: matched terms in the requested fields come back wrapped in
// the configured pre/post tags.
@Test
public void canUseHitHighlighting() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
String description = "Description";
String category = "Category";
SearchOptions sp = new SearchOptions();
sp.setFilter("Rating eq 5");
sp.setHighlightPreTag("<b>");
sp.setHighlightPostTag("</b>");
sp.setHighlightFields(category, description);
SearchPagedIterable results = client.search("luxury hotel", sp, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse result = iterator.next();
List<SearchResult> documents = result.getValue();
assertEquals(1, documents.size());
// Both requested fields must carry highlights; no other field should.
Map<String, List<String>> highlights = documents.get(0).getHighlights();
assertNotNull(highlights);
assertEquals(2, highlights.keySet().size());
assertTrue(highlights.containsKey(description));
assertTrue(highlights.containsKey(category));
String categoryHighlight = highlights.get(category).get(0);
assertEquals("<b>Luxury</b>", categoryHighlight);
// Description can yield multiple highlighted fragments.
List<String> expectedDescriptionHighlights =
Arrays.asList(
"Best <b>hotel</b> in town if you like <b>luxury</b> <b>hotels</b>.",
"We highly recommend this <b>hotel</b>."
);
assertEquals(expectedDescriptionHighlights, highlights.get(description));
}
/**
 * Verifies that a synonym map ("luxury,fancy") attached to HotelName makes a search
 * for "luxury" match the hotel named "Fancy Stay".
 */
@Test
public void canSearchWithSynonyms() {
    client = setupClient(this::createHotelIndex);
    uploadDocumentsJson(client, HOTELS_DATA_JSON);
    String fieldName = "HotelName";
    SearchIndexClient searchIndexClient = getSearchIndexClientBuilder().buildClient();
    // Track the synonym map for cleanup in afterTest.
    synonymMapToDelete = searchIndexClient.createSynonymMap(new SynonymMap(
        testResourceNamer.randomName("names", 32))
        .setSynonyms("luxury,fancy")).getName();
    SearchIndex hotelsIndex = searchIndexClient.getIndex(client.getIndexName());
    // Fix: fail with a clear message instead of an unchecked Optional.get()
    // NoSuchElementException if the field is ever missing from the index.
    SearchField hotelNameField = hotelsIndex.getFields().stream()
        .filter(f -> fieldName.equals(f.getName()))
        .findFirst()
        .orElseThrow(() -> new IllegalStateException("Index is missing field: " + fieldName));
    hotelNameField.setSynonymMapNames(synonymMapToDelete);
    searchIndexClient.createOrUpdateIndex(hotelsIndex);
    // Give the live service time to apply the synonym map before querying.
    sleepIfRunningAgainstService(10000);
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSearchFields(fieldName)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("luxury", searchOptions, Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> response = getSearchResults(results);
    assertEquals(1, response.size());
    assertEquals("Fancy Stay", response.get(0).get("HotelName"));
    assertEquals(5, response.get(0).get("Rating"));
}
/**
 * Flattens a paged search response into a list of raw documents, asserting that every
 * page carries a non-null value.
 *
 * @param results paged search results
 * @return all documents across all pages, in page order
 */
private List<Map<String, Object>> getSearchResults(SearchPagedIterable results) {
    List<Map<String, Object>> searchResults = new ArrayList<>();
    for (SearchPagedResponse page : results.iterableByPage()) {
        assertNotNull(page.getValue());
        page.getElements().forEach(item -> searchResults.add(item.getDocument(SearchDocument.class)));
    }
    return searchResults;
}
// Deserializes one search hit into a SearchDocument and normalizes it (and any nested
// HashMap/ArrayList values) into plain Map/List form for comparison.
private Map<String, Object> extractAndTransformSingleResult(SearchResult result) {
return convertHashMapToMap((result.getDocument(SearchDocument.class)));
}
/**
 * Recursively converts a {@link HashMap} — including any nested {@link HashMap} or
 * {@link ArrayList} values — into a plain {@link Map} suitable for equality comparison.
 *
 * @param mapObject object to convert (must be a {@code HashMap<String, Object>})
 * @return {@link Map}{@code <}{@link String}{@code ,}{@link Object}{@code >}
 */
@SuppressWarnings("unchecked")
private static Map<String, Object> convertHashMapToMap(Object mapObject) {
    Map<String, Object> converted = new HashMap<>();
    for (Map.Entry<String, Object> entry : ((HashMap<String, Object>) mapObject).entrySet()) {
        Object value = entry.getValue();
        // A value is either a nested map, a list, or a leaf — never two at once,
        // so an else-if chain is equivalent to the original back-to-back checks.
        if (value instanceof HashMap) {
            value = convertHashMapToMap(value);
        } else if (value instanceof ArrayList) {
            value = convertArray((ArrayList<Object>) value);
        }
        converted.put(entry.getKey(), value);
    }
    return converted;
}
/**
 * Converts each element of an array list, recursing into nested {@link HashMap}
 * elements and copying leaf values as-is.
 *
 * @param array which elements will be converted
 * @return {@link ArrayList}{@code <}{@link Object}{@code >}
 */
private static ArrayList<Object> convertArray(ArrayList<Object> array) {
    ArrayList<Object> converted = new ArrayList<>(array.size());
    for (Object element : array) {
        converted.add(element instanceof HashMap ? convertHashMapToMap(element) : element);
    }
    return converted;
}
/**
 * Asserts that the HotelId keys of the results, in order, equal the expected keys.
 * Results lacking a HotelId are skipped (matching the original filter semantics).
 */
private void assertKeySequenceEqual(SearchPagedIterable results, List<String> expectedKeys) {
    assertNotNull(results);
    List<String> actualKeys = new ArrayList<>();
    for (SearchResult searchResult : results) {
        // Deserialize once per result instead of twice (filter + map in the original).
        SearchDocument document = searchResult.getDocument(SearchDocument.class);
        if (document.containsKey("HotelId")) {
            actualKeys.add((String) document.get("HotelId"));
        }
    }
    assertEquals(expectedKeys, actualKeys);
}
/**
 * Builds {@code count} synthetic hotel documents with ids "1".."count" and
 * deterministic per-field values derived from the index.
 */
List<Map<String, Object>> createHotelsList(int count) {
    List<Map<String, Object>> documents = new ArrayList<>(count);
    for (int i = 1; i <= count; i++) {
        Map<String, Object> hotel = new HashMap<>();
        hotel.put("HotelId", Integer.toString(i));
        hotel.put("HotelName", "Hotel" + i);
        hotel.put("Description", "Desc" + i);
        hotel.put("Description_fr", "Desc_fr" + i);
        hotel.put("Category", "Catg" + i);
        hotel.put("Tags", Collections.singletonList("tag" + i));
        hotel.put("ParkingIncluded", false);
        hotel.put("SmokingAllowed", false);
        hotel.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T00:00:00Z"));
        hotel.put("Rating", i);
        documents.add(hotel);
    }
    return documents;
}
/**
 * Pairwise-compares search results against the expected hotel documents, failing the
 * test via assertions on any mismatch.
 *
 * @param searchResults documents returned by the search
 * @param hotels expected documents, in the same order
 * @return {@code true} when every pair matched (assertions throw otherwise)
 */
boolean compareResults(List<Map<String, Object>> searchResults, List<Map<String, Object>> hotels) {
    // Fix: the original zip-style loop stopped at the shorter list, silently ignoring
    // missing or extra documents. Require equal sizes up front.
    assertEquals(hotels.size(), searchResults.size());
    Iterator<Map<String, Object>> searchIterator = searchResults.iterator();
    Iterator<Map<String, Object>> hotelsIterator = hotels.iterator();
    while (searchIterator.hasNext() && hotelsIterator.hasNext()) {
        Map<String, Object> result = searchIterator.next();
        Map<String, Object> hotel = hotelsIterator.next();
        assertMapEquals(hotel, result, true, "properties");
    }
    return true;
}
// Asserts the exact bucket boundaries and counts for the range facets produced by
// getSearchOptionsForRangeFacets(): four BaseRate buckets (open-ended at both extremes)
// and two LastRenovationDate buckets split at 2000-01-01.
<T> void assertRangeFacets(List<RangeFacetResult<T>> baseRateFacets, List<RangeFacetResult<T>> lastRenovationDateFacets) {
// Bucket boundaries: (-inf,5), [5,8), [8,10), [10,+inf).
assertNull(baseRateFacets.get(0).getFrom());
assertEquals(5.0, baseRateFacets.get(0).getTo());
assertEquals(5.0, baseRateFacets.get(1).getFrom());
assertEquals(8.0, baseRateFacets.get(1).getTo());
assertEquals(8.0, baseRateFacets.get(2).getFrom());
assertEquals(10.0, baseRateFacets.get(2).getTo());
assertEquals(10.0, baseRateFacets.get(3).getFrom());
assertNull(baseRateFacets.get(3).getTo());
assertEquals(1, baseRateFacets.get(0).getCount().intValue());
assertEquals(1, baseRateFacets.get(1).getCount().intValue());
assertEquals(1, baseRateFacets.get(2).getCount().intValue());
assertEquals(0, baseRateFacets.get(3).getCount().intValue());
// Date buckets: before and after 2000-01-01 (note the service's +0000 offset format).
assertNull(lastRenovationDateFacets.get(0).getFrom());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(0).getTo());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(1).getFrom());
assertNull(lastRenovationDateFacets.get(1).getTo());
assertEquals(5, lastRenovationDateFacets.get(0).getCount().intValue());
assertEquals(2, lastRenovationDateFacets.get(1).getCount().intValue());
}
/** Extracts the facets for {@code expectedField}, asserts their count, and wraps each as a range facet. */
<T> List<RangeFacetResult<T>> getRangeFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    return getFacetsForField(facets, expectedField, expectedCount).stream()
        .map(RangeFacetResult<T>::new)
        .collect(Collectors.toList());
}
/** Extracts the facets for {@code expectedField}, asserts their count, and wraps each as a value facet. */
<T> List<ValueFacetResult<T>> getValueFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    return getFacetsForField(facets, expectedField, expectedCount).stream()
        .map(ValueFacetResult<T>::new)
        .collect(Collectors.toList());
}
/** Asserts that the facet map contains {@code expectedField} with exactly {@code expectedCount} entries. */
private List<FacetResult> getFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    assertTrue(facets.containsKey(expectedField));
    List<FacetResult> fieldFacets = facets.get(expectedField);
    assertEquals(expectedCount, fieldFacets.size());
    return fieldFacets;
}
/** Asserts the set of HotelId keys in the actual results equals the expected documents' key set. */
void assertContainHotelIds(List<Map<String, Object>> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    Set<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toSet());
    Set<String> expectedKeys = expected.stream()
        .filter(item -> item.containsKey("HotelId"))
        .map(item -> (String) item.get("HotelId"))
        .collect(Collectors.toSet());
    assertEquals(expectedKeys, actualKeys);
}
/** Asserts two value-facet lists match element-by-element on count and value. */
<T> void assertValueFacetsEqual(List<ValueFacetResult<T>> actualFacets, ArrayList<ValueFacetResult<T>> expectedFacets) {
    assertEquals(expectedFacets.size(), actualFacets.size());
    for (int index = 0; index < actualFacets.size(); index++) {
        ValueFacetResult<T> expected = expectedFacets.get(index);
        ValueFacetResult<T> actual = actualFacets.get(index);
        assertEquals(expected.getCount(), actual.getCount());
        assertEquals(expected.getValue(), actual.getValue());
    }
}
// Extracts the HotelId key of a search hit as a string.
String getSearchResultId(SearchResult searchResult) {
return searchResult.getDocument(SearchDocument.class).get("HotelId").toString();
}
// Facet options used by the range-facet tests: numeric buckets at 5|8|10 for
// Rooms/BaseRate and a single date split for LastRenovationDate.
SearchOptions getSearchOptionsForRangeFacets() {
return new SearchOptions().setFacets("Rooms/BaseRate,values:5|8|10",
"LastRenovationDate,values:2000-01-01T00:00:00Z");
}
// Facet options used by the value-facet tests, exercising count, sort (by value and
// by count), interval, and default facet expressions.
SearchOptions getSearchOptionsForValueFacets() {
return new SearchOptions().setFacets("Rating,count:2,sort:-value",
"SmokingAllowed,sort:count",
"Category",
"LastRenovationDate,interval:year",
"Rooms/BaseRate,sort:value",
"Tags,sort:value");
}
/** Asserts the ordered list of HotelId keys in the actual results equals {@code expected}. */
void assertListEqualHotelIds(List<String> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    List<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toList());
    assertEquals(expected, actualKeys);
}
// Creates an index whose fields are all non-nullable value types, plus a complex field
// and a complex collection; returns the generated index name for cleanup tracking.
String createIndexWithNonNullableTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("non-nullable-index", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setHidden(false)
.setKey(true),
new SearchField("Rating", SearchFieldDataType.INT32)
.setHidden(false),
new SearchField("Count", SearchFieldDataType.INT64)
.setHidden(false),
new SearchField("IsEnabled", SearchFieldDataType.BOOLEAN)
.setHidden(false),
new SearchField("Ratio", SearchFieldDataType.DOUBLE)
.setHidden(false),
new SearchField("StartDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
new SearchField("EndDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
// Single complex sub-document.
new SearchField("TopLevelBucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true))),
// Collection of complex sub-documents.
new SearchField("Buckets", SearchFieldDataType.collection(SearchFieldDataType.COMPLEX))
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)))));
setupIndex(index);
return index.getName();
}
// Creates a small index with a key, a filterable int, and a filterable complex field;
// returns the generated index name for cleanup tracking.
String createIndexWithValueTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("testindex", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setKey(true)
.setSearchable(true),
new SearchField("IntValue", SearchFieldDataType.INT32)
.setFilterable(true),
new SearchField("Bucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)
))
)
);
setupIndex(index);
return index.getName();
}
/**
 * Builds the three fixture documents used by the value-type filter tests; only "789"
 * fails the filter exercised in canFilterNonNullableType.
 */
List<Map<String, Object>> createDocsListWithValueTypes() {
    return Arrays.asList(
        valueTypeDoc("132", 0, "A", 3),
        valueTypeDoc("456", 7, "B", 5),
        valueTypeDoc("789", 1, "B", 99));
}

/** Assembles one document with a nested Bucket complex value. */
private static Map<String, Object> valueTypeDoc(String key, int intValue, String bucketName, int bucketCount) {
    Map<String, Object> bucket = new HashMap<>();
    bucket.put("BucketName", bucketName);
    bucket.put("Count", bucketCount);
    Map<String, Object> document = new HashMap<>();
    document.put("Key", key);
    document.put("IntValue", intValue);
    document.put("Bucket", bucket);
    return document;
}
} |
Another option for the helper method is taking two `List<T>` and a `Function<T, String>` which handles extracting the document key and the helper method creates the `Map`s. | public void canRoundTripNonNullableValueTypes() {
client = setupClient(this::createIndexWithNonNullableTypes);
Date startEpoch = Date.from(Instant.ofEpochMilli(1275346800000L));
NonNullableModel doc1 = new NonNullableModel()
.key("132")
.count(3)
.isEnabled(true)
.rating(5)
.ratio(3.25)
.startDate(new Date(startEpoch.getTime()))
.endDate(new Date(startEpoch.getTime()))
.topLevelBucket(new Bucket().bucketName("A").count(12))
.buckets(new Bucket[]{new Bucket().bucketName("B").count(20), new Bucket().bucketName("C").count(7)});
NonNullableModel doc2 = new NonNullableModel().key("456").buckets(new Bucket[]{});
uploadDocuments(client, Arrays.asList(doc1, doc2));
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertTrue(iterator.hasNext());
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
try {
assertObjectEquals(doc1, result.getValue().get(0).getDocument(NonNullableModel.class), true);
assertObjectEquals(doc2, result.getValue().get(1).getDocument(NonNullableModel.class), true);
} catch (AssertionError e) {
assertObjectEquals(doc2, result.getValue().get(0).getDocument(NonNullableModel.class), true);
assertObjectEquals(doc1, result.getValue().get(1).getDocument(NonNullableModel.class), true);
}
} | try { | public void canRoundTripNonNullableValueTypes() {
client = setupClient(this::createIndexWithNonNullableTypes);
Date startEpoch = Date.from(Instant.ofEpochMilli(1275346800000L));
NonNullableModel doc1 = new NonNullableModel()
.key("132")
.count(3)
.isEnabled(true)
.rating(5)
.ratio(3.25)
.startDate(new Date(startEpoch.getTime()))
.endDate(new Date(startEpoch.getTime()))
.topLevelBucket(new Bucket().bucketName("A").count(12))
.buckets(new Bucket[]{new Bucket().bucketName("B").count(20), new Bucket().bucketName("C").count(7)});
NonNullableModel doc2 = new NonNullableModel().key("456").buckets(new Bucket[]{});
uploadDocuments(client, Arrays.asList(doc1, doc2));
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertTrue(iterator.hasNext());
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
List<NonNullableModel> expectedDocuments = new ArrayList<>();
expectedDocuments.add(doc1); expectedDocuments.add(doc2);
List<NonNullableModel> actualDocuments = new ArrayList<>();
result.getValue().forEach(val -> actualDocuments.add(val.getDocument(NonNullableModel.class)));
assertTrue(equalDocumentSets(expectedDocuments, actualDocuments));
} | class SearchSyncTests extends SearchTestBase {
// Indexes created by individual tests; deleted in afterTest.
private final List<String> indexesToDelete = new ArrayList<>();
// Name of a synonym map created during a test; deleted in afterTest when non-empty.
private String synonymMapToDelete = "";
// Shared, pre-populated index reused by tests that do not mutate index state.
private static final String INDEX_NAME = "azsearch-search-shared-instance";
private static SearchIndexClient searchIndexClient;
private SearchClient client;
// Creates the shared index once per class; skipped in PLAYBACK mode where no live
// service is available.
@BeforeAll
public static void setupClass() {
TestBase.setupClass();
if (TEST_MODE == TestMode.PLAYBACK) {
return;
}
searchIndexClient = setupSharedIndex(INDEX_NAME);
}
// Deletes any per-test indexes and the synonym map created during the test.
@Override
protected void afterTest() {
super.afterTest();
SearchIndexClient serviceClient = getSearchIndexClientBuilder().buildClient();
for (String index : indexesToDelete) {
serviceClient.deleteIndex(index);
}
if (!CoreUtils.isNullOrEmpty(synonymMapToDelete)) {
serviceClient.deleteSynonymMap(synonymMapToDelete);
// Give the live service time to process the deletion before the next test.
sleepIfRunningAgainstService(5000);
}
}
/** Removes the shared index after the class finishes; nothing to clean up in PLAYBACK mode. */
@AfterAll
protected static void cleanupClass() {
    if (TEST_MODE == TestMode.PLAYBACK) {
        return;
    }
    searchIndexClient.deleteIndex(INDEX_NAME);
}
// Creates the index produced by the supplier, registers it for deletion in afterTest,
// and returns a client bound to it.
private SearchClient setupClient(Supplier<String> indexSupplier) {
String indexName = indexSupplier.get();
indexesToDelete.add(indexName);
return getSearchClientBuilder(indexName).buildClient();
}
// An invalid $filter expression must surface as an HTTP 400 with the service's parse error.
@Test
public void searchThrowsWhenRequestIsMalformed() {
SearchOptions invalidSearchOptions = new SearchOptions().setFilter("This is not a valid filter.");
assertHttpResponseException(
() -> search("*", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Invalid expression: Syntax error at position 7 in 'This is not a valid filter.'");
}
// An unescaped '/' inside a Lucene regex query must surface as an HTTP 400 parse error.
@Test
public void searchThrowsWhenSpecialCharInRegexIsUnescaped() {
SearchOptions invalidSearchOptions = new SearchOptions().setQueryType(QueryType.FULL);
assertHttpResponseException(
() -> search("/.*/.*/", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Failed to parse query string at line 1, column 8.");
}
// Executes a search against the shared index and forces the first page to be fetched,
// so that request errors are raised eagerly for the exception-assertion helpers above.
private void search(String searchText, SearchOptions searchOptions) {
getSearchClientBuilder(INDEX_NAME).buildClient().search(searchText, searchOptions, Context.NONE)
.iterableByPage()
.iterator()
.next();
}
// Round-trips documents as dynamic SearchDocument maps: every uploaded hotel comes back,
// each with the default score of 1 and no highlights.
@Test
public void canSearchDynamicDocuments() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
SearchPagedIterable searchResults = client.search("*");
assertNotNull(searchResults);
// Count/coverage/facets were not requested, so they must be absent.
assertNull(searchResults.getTotalCount());
assertNull(searchResults.getCoverage());
assertNull(searchResults.getFacets());
Iterator<SearchPagedResponse> iterator = searchResults.iterableByPage().iterator();
List<Map<String, Object>> actualResults = new ArrayList<>(hotels.size());
while (iterator.hasNext()) {
SearchPagedResponse result = iterator.next();
assertNotNull(result.getValue());
result.getElements().forEach(item -> {
assertEquals(1, item.getScore(), 0);
assertNull(item.getHighlights());
actualResults.add(item.getDocument(SearchDocument.class));
});
}
assertEquals(hotels.size(), actualResults.size());
// Sort numerically by id before the pairwise comparison with the fixture.
actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.get("HotelId").toString())));
assertTrue(compareResults(actualResults, hotels));
}
// Verifies server-driven paging: 100 documents split into two 50-document pages, with a
// continuation token present on the first page only.
@Test
public void canContinueSearch() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = createHotelsList(100);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions().setSelect("HotelId")
.setOrderBy("HotelId asc");
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(50, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 50), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(50, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(50, 100), secondPage.getValue());
// The final page must not advertise further results.
assertNull(secondPage.getContinuationToken());
}
// Verifies paging when $top exceeds the service page size: 2000 requested documents are
// delivered as two 1000-document pages with a continuation token between them.
@Test
public void canContinueSearchWithTop() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = createHotelsList(3000);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions()
.setTop(2000)
.setSelect("HotelId")
.setOrderBy("HotelId asc");
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(1000, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 1000), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(1000, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(1000, 2000), secondPage.getValue());
// $top is satisfied after 2000 documents, so no further continuation is offered.
assertNull(secondPage.getContinuationToken());
}
// Round-trips documents as the strongly typed Hotel model and compares them field-by-field
// against the uploaded fixture.
@Test
public void canSearchStaticallyTypedDocuments() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
assertNotNull(results);
// Count/coverage/facets were not requested, so they must be absent.
assertNull(results.getTotalCount());
assertNull(results.getCoverage());
assertNull(results.getFacets());
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
List<Hotel> actualResults = new ArrayList<>(hotels.size());
while (iterator.hasNext()) {
SearchPagedResponse result = iterator.next();
assertNotNull(result.getValue());
result.getElements().forEach(item -> {
assertEquals(1, item.getScore(), 0);
assertNull(item.getHighlights());
Hotel hotel = item.getDocument(Hotel.class);
actualResults.add(hotel);
});
}
// Convert the raw fixture maps into the typed model for comparison.
List<Hotel> hotelsList = hotels.stream()
.map(hotel -> convertMapToValue(hotel, Hotel.class))
.collect(Collectors.toList());
assertEquals(hotelsList.size(), actualResults.size());
actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.hotelId())));
for (int i = 0; i < hotelsList.size(); i++) {
assertObjectEquals(hotelsList.get(i), actualResults.get(i), true, "properties");
}
}
/**
 * Verifies that an Edm.DateTimeOffset field deserializes into the obsolete
 * {@code java.util.Date} type on a statically-typed model class.
 *
 * <p>Fix: the original had {@code @SuppressWarnings} and {@code @Test} each
 * duplicated back-to-back; neither annotation is repeatable, so the duplicates
 * were a compile error. A single copy of each is kept.
 */
@SuppressWarnings("UseOfObsoleteDateTimeApi")
@Test
public void canSearchWithDateInStaticModel() {
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    // The shared index contains exactly one document matching "Fancy".
    OffsetDateTime expected = OffsetDateTime.parse("2010-06-27T00:00:00Z");
    SearchPagedIterable results = client.search("Fancy", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    assertTrue(iterator.hasNext());
    SearchPagedResponse result = iterator.next();
    assertEquals(1, result.getValue().size());
    Date actual = result.getValue().get(0).getDocument(Hotel.class).lastRenovationDate();
    // Compare through epoch millis to sidestep Date/OffsetDateTime zone differences.
    long epochMilli = expected.toInstant().toEpochMilli();
    assertEquals(new Date(epochMilli), actual);
}
// Verifies $select/searchFields projection: unselected top-level fields come back
// null, and nested selections (Address/City, Rooms/Type) are projected per element.
@Test
public void canSearchWithSelectedFields() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions sp = new SearchOptions();
sp.setSearchFields("HotelName", "Category");
sp.setSelect("HotelName", "Rating", "Address/City", "Rooms/Type");
SearchPagedIterable results = client.search("fancy luxury secret", sp, Context.NONE);
// "Fancy Stay" has no Address and no Rooms, so the projection yields null/empty.
HashMap<String, Object> expectedHotel1 = new HashMap<>();
expectedHotel1.put("HotelName", "Fancy Stay");
expectedHotel1.put("Rating", 5);
expectedHotel1.put("Address", null);
expectedHotel1.put("Rooms", Collections.emptyList());
HashMap<String, Object> expectedHotel2 = new HashMap<>();
expectedHotel2.put("HotelName", "Secret Point Motel");
expectedHotel2.put("Rating", 4);
HashMap<String, Object> address = new HashMap<>();
address.put("City", "New York");
expectedHotel2.put("Address", address);
// Both rooms of the motel project to the same selected field value.
HashMap<String, Object> rooms = new HashMap<>();
rooms.put("Type", "Budget Room");
HashMap<String, Object> rooms2 = new HashMap<>();
rooms2.put("Type", "Budget Room");
expectedHotel2.put("Rooms", Arrays.asList(rooms, rooms2));
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse result = iterator.next();
assertEquals(2, result.getValue().size());
Map<String, Object> hotel1 = extractAndTransformSingleResult(result.getValue().get(0));
Map<String, Object> hotel2 = extractAndTransformSingleResult(result.getValue().get(1));
assertMapEquals(expectedHotel1, hotel1, true);
assertMapEquals(expectedHotel2, hotel2, true);
}
// Verifies client-side paging via $top/$skip: page size 3, advancing by bumping skip.
// Ids are ordered lexicographically, hence "1", "10", "2".
@Test
public void canUseTopAndSkipForClientSidePaging() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions parameters = new SearchOptions().setTop(3).setSkip(0).setOrderBy("HotelId");
SearchPagedIterable results = client.search("*", parameters, Context.NONE);
assertKeySequenceEqual(results, Arrays.asList("1", "10", "2"));
// Second page: skip the first three results.
parameters.setSkip(3);
results = client.search("*", parameters, Context.NONE);
assertKeySequenceEqual(results, Arrays.asList("3", "4", "5"));
}
// Verifies the default sort: with no $orderby, results come back ordered by
// relevance score (ascending check on the first two results).
@Test
public void searchWithoutOrderBySortsByScore() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
Iterator<SearchResult> results = client
.search("*", new SearchOptions().setFilter("Rating lt 4"), Context.NONE).iterator();
SearchResult firstResult = results.next();
SearchResult secondResult = results.next();
assertTrue(firstResult.getScore() <= secondResult.getScore());
}
// Verifies multi-key $orderby: Rating descending, ties broken by
// LastRenovationDate ascending, remaining ties broken by HotelId.
@Test
public void orderByProgressivelyBreaksTies() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
String[] expectedResults = new String[]{"1", "9", "3", "4", "5", "10", "2", "6", "7", "8"};
Stream<String> results = client
.search("*", new SearchOptions().setOrderBy("Rating desc", "LastRenovationDate asc", "HotelId"),
Context.NONE).stream()
.map(this::getSearchResultId);
assertArrayEquals(results.toArray(), expectedResults);
}
// Verifies an OData $filter combining a numeric comparison with a
// DateTimeOffset literal; exactly hotels 1 and 5 match.
@Test
public void canFilter() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setFilter("Rating gt 3 and LastRenovationDate gt 2000-01-01T00:00:00Z")
.setOrderBy("HotelId asc");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(2, searchResultsList.size());
assertEquals("1", searchResultsList.get(0).get("HotelId").toString());
assertEquals("5", searchResultsList.get(1).get("HotelId").toString());
}
// Verifies range facets ("values:" buckets) on a numeric collection field and on
// a date field; bucket boundaries/counts are checked by assertRangeFacets.
@Test
public void canSearchWithRangeFacets() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", getSearchOptionsForRangeFacets(),
Context.NONE);
assertNotNull(results.getFacets());
// "values:5|8|10" produces 4 buckets; the single date boundary produces 2.
List<RangeFacetResult<String>> baseRateFacets = getRangeFacetsForField(results.getFacets(),
"Rooms/BaseRate", 4);
List<RangeFacetResult<String>> lastRenovationDateFacets = getRangeFacetsForField(
results.getFacets(), "LastRenovationDate", 2);
assertRangeFacets(baseRateFacets, lastRenovationDateFacets);
for (SearchPagedResponse result : results.iterableByPage()) {
assertContainHotelIds(hotels, result.getValue());
}
}
// Verifies value facets across numeric, boolean, string, date, nested-collection
// and string-collection fields, exercising the count/sort/interval facet options.
@Test
public void canSearchWithValueFacets() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", getSearchOptionsForValueFacets(),
Context.NONE);
// Facets are aggregated once for the whole result set, not per page.
Map<String, List<FacetResult>> facets = results.getFacets();
assertNotNull(facets);
// NOTE(review): the facet assertions below do not depend on the page; running
// them inside the page loop re-checks the same values each iteration.
for (SearchPagedResponse result : results.iterableByPage()) {
assertContainHotelIds(hotels, result.getValue());
// "Rating,count:2,sort:-value" -> top two ratings by descending value.
assertValueFacetsEqual(
getValueFacetsForField(facets, "Rating", 2),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, 5),
new ValueFacetResult<>(4L, 4))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "SmokingAllowed", 2),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(4L, false),
new ValueFacetResult<>(2L, true))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Category", 3),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(5L, "Budget"),
new ValueFacetResult<>(1L, "Boutique"),
new ValueFacetResult<>(1L, "Luxury"))));
// "LastRenovationDate,interval:year" buckets dates to year boundaries.
assertValueFacetsEqual(getValueFacetsForField(facets, "LastRenovationDate", 6),
new ArrayList<>(Arrays.asList(new ValueFacetResult<>(1L, OffsetDateTime.parse("1970-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("1982-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(2L, OffsetDateTime.parse("1995-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("1999-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("2010-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
new ValueFacetResult<>(1L, OffsetDateTime.parse("2012-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Rooms/BaseRate", 4),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, 2.44),
new ValueFacetResult<>(1L, 7.69),
new ValueFacetResult<>(1L, 8.09),
new ValueFacetResult<>(1L, 9.69))));
assertValueFacetsEqual(
getValueFacetsForField(facets, "Tags", 10),
new ArrayList<>(Arrays.asList(
new ValueFacetResult<>(1L, "24-hour front desk service"),
new ValueFacetResult<>(1L, "air conditioning"),
new ValueFacetResult<>(4L, "budget"),
new ValueFacetResult<>(1L, "coffee in lobby"),
new ValueFacetResult<>(2L, "concierge"),
new ValueFacetResult<>(1L, "motel"),
new ValueFacetResult<>(2L, "pool"),
new ValueFacetResult<>(1L, "restaurant"),
new ValueFacetResult<>(1L, "view"),
new ValueFacetResult<>(4L, "wifi"))));
}
}
// Verifies full Lucene query syntax: a fuzzy field-scoped query ("roch~")
// matches the single "Roach Motel" document.
@Test
public void canSearchWithLuceneSyntax() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
Map<String, Object> expectedResult = new HashMap<>();
expectedResult.put("HotelName", "Roach Motel");
expectedResult.put("Rating", 1);
// QueryType.FULL is required for Lucene operators such as the fuzzy "~".
SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL).setSelect("HotelName", "Rating");
SearchPagedIterable results = client.search("HotelName:roch~", searchOptions,
Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(1, searchResultsList.size());
assertEquals(expectedResult, searchResultsList.get(0));
}
// Verifies filtering on non-nullable value-type fields, including a filter that
// reaches into a complex sub-object (Bucket/BucketName, Bucket/Count).
@Test
public void canFilterNonNullableType() {
client = setupClient(this::createIndexWithValueTypes);
List<Map<String, Object>> docsList = createDocsListWithValueTypes();
uploadDocuments(client, docsList);
// Document "789" has Bucket/Count = 99 and IntValue = 1, so it must be excluded.
List<Map<String, Object>> expectedDocsList =
docsList.stream()
.filter(d -> !d.get("Key").equals("789"))
.collect(Collectors.toList());
SearchOptions searchOptions = new SearchOptions()
.setFilter("IntValue eq 0 or (Bucket/BucketName eq 'B' and Bucket/Count lt 10)");
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> searchResultsList = getSearchResults(results);
assertEquals(2, searchResultsList.size());
assertTrue(searchResultsList.containsAll(expectedDocsList));
}
// Verifies SearchMode.ALL: every term must match, so "Cheapest hotel"
// narrows to the single document that contains both terms.
@Test
public void canSearchWithSearchModeAll() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> response = getSearchResults(client
.search("Cheapest hotel", new SearchOptions().setQueryType(QueryType.SIMPLE).setSearchMode(SearchMode.ALL),
Context.NONE));
assertEquals(1, response.size());
assertEquals("2", response.get(0).get("HotelId"));
}
// Verifies the default search mode (ANY): documents matching either term of
// "Cheapest hotel" are returned, giving seven hits instead of one.
@Test
public void defaultSearchModeIsAny() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> response = getSearchResults(client.search("Cheapest hotel",
new SearchOptions().setOrderBy("HotelId"), Context.NONE));
assertEquals(7, response.size());
assertEquals(
Arrays.asList("1", "10", "2", "3", "4", "5", "9"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies $count=true: the total document count is reported on the result and
// all documents fit in one page for this data set.
@Test
public void canGetResultCountInSearch() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
SearchPagedIterable results = client.search("*", new SearchOptions().setIncludeTotalCount(true),
Context.NONE);
assertNotNull(results);
assertEquals(hotels.size(), results.getTotalCount().intValue());
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
assertNotNull(iterator.next());
// The whole data set is smaller than one service page.
assertFalse(iterator.hasNext());
}
// Verifies Lucene regex queries: /.*oach.*\/?/ against HotelName matches only
// "Roach Motel".
@Test
public void canSearchWithRegex() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setQueryType(QueryType.FULL)
.setSelect("HotelName", "Rating");
SearchPagedIterable results = client.search("HotelName:/.*oach.*\\/?/", searchOptions,
Context.NONE);
assertNotNull(results);
List<Map<String, Object>> resultsList = getSearchResults(results);
Map<String, Object> expectedHotel = new HashMap<>();
expectedHotel.put("HotelName", "Roach Motel");
expectedHotel.put("Rating", 1);
assertEquals(1, resultsList.size());
assertEquals(resultsList.get(0), expectedHotel);
}
// Verifies that every Lucene special character, when backslash-escaped, is
// accepted by the parser and simply yields zero matches.
@Test
public void canSearchWithEscapedSpecialCharsInRegex() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL);
SearchPagedIterable results = client.search("\\+\\-\\&\\|\\!\\(\\)\\{\\}\\[\\]\\^\\~\\*\\?\\:", searchOptions,
Context.NONE);
assertNotNull(results);
List<Map<String, Object>> resultsList = getSearchResults(results);
assertEquals(0, resultsList.size());
}
// Verifies that a distance-based scoring profile ("nearest") with a geo point
// parameter reorders results: the closer hotel ("2") outranks hotel "1".
@Test
public void searchWithScoringProfileBoostsScore() {
client = setupClient(this::createHotelIndex);
uploadDocumentsJson(client, HOTELS_DATA_JSON);
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("nearest")
.setScoringParameters(new ScoringParameter("myloc", new GeoPoint(-122.0, 49.0)))
.setFilter("Rating eq 5 or Rating eq 1")
.setOrderBy("HotelId desc");
List<Map<String, Object>> response = getSearchResults(client.search("hotel", searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals("2", response.get(0).get("HotelId").toString());
assertEquals("1", response.get(1).get("HotelId").toString());
}
// Verifies that scoring-parameter values containing commas and quotes
// ("Hello, O''Brien") are escaped correctly when sent to the service.
@Test
public void searchWithScoringProfileEscaper() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("text")
.setScoringParameters(new ScoringParameter("mytag", Arrays.asList("concierge", "Hello, O''Brien")))
.setFilter("Rating eq 5 or Rating eq 1");
List<Map<String, Object>> response = getSearchResults(client.search("hotel",
searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals(
Arrays.asList("1", "2"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies that an empty string among scoring-parameter values is tolerated
// and does not change the result set.
@Test
public void searchWithScoringParametersEmpty() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchOptions searchOptions = new SearchOptions()
.setScoringProfile("text")
.setScoringParameters(new ScoringParameter("mytag", Arrays.asList("", "concierge")))
.setFilter("Rating eq 5 or Rating eq 1");
List<Map<String, Object>> response = getSearchResults(client.search("hotel",
searchOptions, Context.NONE));
assertEquals(2, response.size());
assertEquals(
Arrays.asList("1", "2"),
response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
// Verifies minimumCoverage: requesting 50% coverage makes the service report
// the actual coverage (100% on a single-partition test index).
@Test
public void canSearchWithMinimumCoverage() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
SearchPagedIterable results = client.search("*", new SearchOptions().setMinimumCoverage(50.0),
Context.NONE);
assertNotNull(results);
assertEquals(100.0, results.getCoverage(), 0);
}
// Verifies hit highlighting: matched terms in the Category and Description
// fields are wrapped in the configured <b>...</b> pre/post tags.
@Test
public void canUseHitHighlighting() {
client = getSearchClientBuilder(INDEX_NAME).buildClient();
String description = "Description";
String category = "Category";
SearchOptions sp = new SearchOptions();
sp.setFilter("Rating eq 5");
sp.setHighlightPreTag("<b>");
sp.setHighlightPostTag("</b>");
sp.setHighlightFields(category, description);
SearchPagedIterable results = client.search("luxury hotel", sp, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse result = iterator.next();
List<SearchResult> documents = result.getValue();
// Only one 5-star document matches the filter.
assertEquals(1, documents.size());
Map<String, List<String>> highlights = documents.get(0).getHighlights();
assertNotNull(highlights);
assertEquals(2, highlights.keySet().size());
assertTrue(highlights.containsKey(description));
assertTrue(highlights.containsKey(category));
String categoryHighlight = highlights.get(category).get(0);
assertEquals("<b>Luxury</b>", categoryHighlight);
// Description yields one highlighted fragment per matching sentence.
List<String> expectedDescriptionHighlights =
Arrays.asList(
"Best <b>hotel</b> in town if you like <b>luxury</b> <b>hotels</b>.",
"We highly recommend this <b>hotel</b>."
);
assertEquals(expectedDescriptionHighlights, highlights.get(description));
}
// Verifies synonym maps: after attaching a "luxury,fancy" synonym map to
// HotelName, a search for "luxury" matches the "Fancy Stay" document.
@Test
public void canSearchWithSynonyms() {
client = setupClient(this::createHotelIndex);
uploadDocumentsJson(client, HOTELS_DATA_JSON);
String fieldName = "HotelName";
SearchIndexClient searchIndexClient = getSearchIndexClientBuilder().buildClient();
// Remember the synonym map name so afterTest() can delete it.
synonymMapToDelete = searchIndexClient.createSynonymMap(new SynonymMap(
testResourceNamer.randomName("names", 32))
.setSynonyms("luxury,fancy")).getName();
SearchIndex hotelsIndex = searchIndexClient.getIndex(client.getIndexName());
hotelsIndex.getFields().stream()
.filter(f -> fieldName.equals(f.getName()))
.findFirst().get().setSynonymMapNames(synonymMapToDelete);
searchIndexClient.createOrUpdateIndex(hotelsIndex);
// Give the service time to apply the updated index definition.
sleepIfRunningAgainstService(10000);
SearchOptions searchOptions = new SearchOptions()
.setQueryType(QueryType.FULL)
.setSearchFields(fieldName)
.setSelect("HotelName", "Rating");
SearchPagedIterable results = client.search("luxury", searchOptions, Context.NONE);
assertNotNull(results);
List<Map<String, Object>> response = getSearchResults(results);
assertEquals(1, response.size());
assertEquals("Fancy Stay", response.get(0).get("HotelName"));
assertEquals(5, response.get(0).get("Rating"));
}
/**
 * Drains every page of {@code results}, returning each hit as a generic document map.
 * Each page's value list is asserted non-null along the way.
 */
private List<Map<String, Object>> getSearchResults(SearchPagedIterable results) {
    List<Map<String, Object>> documents = new ArrayList<>();
    for (SearchPagedResponse page : results.iterableByPage()) {
        assertNotNull(page.getValue());
        for (SearchResult hit : page.getElements()) {
            documents.add(hit.getDocument(SearchDocument.class));
        }
    }
    return documents;
}
/** Deserializes a single search hit and normalizes its nested HashMaps/ArrayLists. */
private Map<String, Object> extractAndTransformSingleResult(SearchResult result) {
    SearchDocument document = result.getDocument(SearchDocument.class);
    return convertHashMapToMap(document);
}
/**
 * Recursively converts a {@link HashMap} into a plain {@link Map}, rebuilding any
 * nested {@link HashMap} values (via recursion) and {@link ArrayList} values
 * (via {@link #convertArray(ArrayList)}).
 *
 * @param mapObject object to convert; must be a {@code HashMap<String, Object>}
 * @return {@link Map}{@code <}{@link String}{@code ,}{@link Object}{@code >}
 */
@SuppressWarnings("unchecked")
private static Map<String, Object> convertHashMapToMap(Object mapObject) {
    HashMap<String, Object> source = (HashMap<String, Object>) mapObject;
    Map<String, Object> converted = new HashMap<>();
    for (Map.Entry<String, Object> entry : source.entrySet()) {
        Object value = entry.getValue();
        if (value instanceof HashMap) {
            value = convertHashMapToMap(value);
        }
        if (value instanceof ArrayList) {
            value = convertArray((ArrayList<Object>) value);
        }
        converted.put(entry.getKey(), value);
    }
    return converted;
}
/**
 * Returns a copy of {@code array} in which every {@link HashMap} element has been
 * converted through {@link #convertHashMapToMap(Object)}; other elements are kept as-is.
 *
 * @param array which elements will be converted
 * @return {@link ArrayList}{@code <}{@link Object}{@code >}
 */
private static ArrayList<Object> convertArray(ArrayList<Object> array) {
    ArrayList<Object> converted = new ArrayList<>(array.size());
    for (Object element : array) {
        converted.add(element instanceof HashMap ? convertHashMapToMap(element) : element);
    }
    return converted;
}
/** Asserts that the "HotelId" keys of {@code results}, in order, equal {@code expectedKeys}. */
private void assertKeySequenceEqual(SearchPagedIterable results, List<String> expectedKeys) {
    assertNotNull(results);
    List<String> actualKeys = new ArrayList<>();
    for (SearchResult hit : results) {
        SearchDocument document = hit.getDocument(SearchDocument.class);
        if (document.containsKey("HotelId")) {
            actualKeys.add((String) document.get("HotelId"));
        }
    }
    assertEquals(expectedKeys, actualKeys);
}
/**
 * Builds {@code count} synthetic hotel documents with string ids "1".."count";
 * Rating mirrors the numeric id, all other fields are derived from it.
 */
List<Map<String, Object>> createHotelsList(int count) {
    List<Map<String, Object>> documents = new ArrayList<>(count);
    // The renovation date is the same immutable value for every document.
    OffsetDateTime renovationDate = OffsetDateTime.parse("2010-06-27T00:00:00Z");
    for (int id = 1; id <= count; id++) {
        Map<String, Object> hotel = new HashMap<>();
        hotel.put("HotelId", String.valueOf(id));
        hotel.put("HotelName", "Hotel" + id);
        hotel.put("Description", "Desc" + id);
        hotel.put("Description_fr", "Desc_fr" + id);
        hotel.put("Category", "Catg" + id);
        hotel.put("Tags", Collections.singletonList("tag" + id));
        hotel.put("ParkingIncluded", false);
        hotel.put("SmokingAllowed", false);
        hotel.put("LastRenovationDate", renovationDate);
        hotel.put("Rating", id);
        documents.add(hotel);
    }
    return documents;
}
/**
 * Pairwise-compares each search result with the corresponding expected hotel via
 * {@code assertMapEquals} (which fails the test on any field mismatch).
 *
 * @param searchResults the documents returned by the service
 * @param hotels the expected documents, in the same order
 * @return {@code true} only if both lists were fully consumed; previously this
 *     always returned {@code true}, silently ignoring trailing elements when the
 *     lists differed in length.
 */
boolean compareResults(List<Map<String, Object>> searchResults, List<Map<String, Object>> hotels) {
    Iterator<Map<String, Object>> searchIterator = searchResults.iterator();
    Iterator<Map<String, Object>> hotelsIterator = hotels.iterator();
    while (searchIterator.hasNext() && hotelsIterator.hasNext()) {
        Map<String, Object> result = searchIterator.next();
        Map<String, Object> hotel = hotelsIterator.next();
        assertMapEquals(hotel, result, true, "properties");
    }
    // Report a length mismatch instead of ignoring unconsumed elements.
    return !searchIterator.hasNext() && !hotelsIterator.hasNext();
}
// Asserts the exact bucket boundaries and counts produced by
// getSearchOptionsForRangeFacets(): "values:5|8|10" yields four BaseRate buckets
// (open-ended at both extremes) and the single 2000-01-01 boundary yields two
// LastRenovationDate buckets.
<T> void assertRangeFacets(List<RangeFacetResult<T>> baseRateFacets, List<RangeFacetResult<T>> lastRenovationDateFacets) {
// First bucket is open below, last bucket open above.
assertNull(baseRateFacets.get(0).getFrom());
assertEquals(5.0, baseRateFacets.get(0).getTo());
assertEquals(5.0, baseRateFacets.get(1).getFrom());
assertEquals(8.0, baseRateFacets.get(1).getTo());
assertEquals(8.0, baseRateFacets.get(2).getFrom());
assertEquals(10.0, baseRateFacets.get(2).getTo());
assertEquals(10.0, baseRateFacets.get(3).getFrom());
assertNull(baseRateFacets.get(3).getTo());
assertEquals(1, baseRateFacets.get(0).getCount().intValue());
assertEquals(1, baseRateFacets.get(1).getCount().intValue());
assertEquals(1, baseRateFacets.get(2).getCount().intValue());
assertEquals(0, baseRateFacets.get(3).getCount().intValue());
// Date boundaries are returned by the service in this fixed-offset string form.
assertNull(lastRenovationDateFacets.get(0).getFrom());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(0).getTo());
assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(1).getFrom());
assertNull(lastRenovationDateFacets.get(1).getTo());
assertEquals(5, lastRenovationDateFacets.get(0).getCount().intValue());
assertEquals(2, lastRenovationDateFacets.get(1).getCount().intValue());
}
/** Extracts the range facets for {@code expectedField}, asserting their number first. */
<T> List<RangeFacetResult<T>> getRangeFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    List<RangeFacetResult<T>> converted = new ArrayList<>(expectedCount);
    for (FacetResult facet : getFacetsForField(facets, expectedField, expectedCount)) {
        converted.add(new RangeFacetResult<>(facet));
    }
    return converted;
}
/** Extracts the value facets for {@code expectedField}, asserting their number first. */
<T> List<ValueFacetResult<T>> getValueFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    List<ValueFacetResult<T>> converted = new ArrayList<>(expectedCount);
    for (FacetResult facet : getFacetsForField(facets, expectedField, expectedCount)) {
        converted.add(new ValueFacetResult<>(facet));
    }
    return converted;
}
/**
 * Looks up the facet list for {@code expectedField}, failing the test if the field
 * is missing or its facet count differs from {@code expectedCount}.
 */
private List<FacetResult> getFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    assertTrue(facets.containsKey(expectedField));
    List<FacetResult> fieldFacets = facets.get(expectedField);
    assertEquals(expectedCount, fieldFacets.size());
    return fieldFacets;
}
/** Asserts that {@code actual} contains exactly the same set of "HotelId" values as {@code expected}. */
void assertContainHotelIds(List<Map<String, Object>> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    Set<String> actualKeys = new HashSet<>();
    for (SearchResult hit : actual) {
        SearchDocument document = hit.getDocument(SearchDocument.class);
        if (document.containsKey("HotelId")) {
            actualKeys.add((String) document.get("HotelId"));
        }
    }
    Set<String> expectedKeys = new HashSet<>();
    for (Map<String, Object> hotel : expected) {
        if (hotel.containsKey("HotelId")) {
            expectedKeys.add((String) hotel.get("HotelId"));
        }
    }
    assertEquals(expectedKeys, actualKeys);
}
/** Asserts the two facet lists match element-by-element on both count and value. */
<T> void assertValueFacetsEqual(List<ValueFacetResult<T>> actualFacets, ArrayList<ValueFacetResult<T>> expectedFacets) {
    assertEquals(expectedFacets.size(), actualFacets.size());
    int index = 0;
    for (ValueFacetResult<T> actual : actualFacets) {
        ValueFacetResult<T> expected = expectedFacets.get(index++);
        assertEquals(expected.getCount(), actual.getCount());
        assertEquals(expected.getValue(), actual.getValue());
    }
}
/** Returns the "HotelId" of a search hit as a string. */
String getSearchResultId(SearchResult searchResult) {
    SearchDocument document = searchResult.getDocument(SearchDocument.class);
    return document.get("HotelId").toString();
}
/** Builds options requesting range facets: three BaseRate boundaries and one date boundary. */
SearchOptions getSearchOptionsForRangeFacets() {
    SearchOptions options = new SearchOptions();
    options.setFacets("Rooms/BaseRate,values:5|8|10",
        "LastRenovationDate,values:2000-01-01T00:00:00Z");
    return options;
}
/** Builds options requesting value facets with count/sort/interval modifiers. */
SearchOptions getSearchOptionsForValueFacets() {
    SearchOptions options = new SearchOptions();
    options.setFacets("Rating,count:2,sort:-value",
        "SmokingAllowed,sort:count",
        "Category",
        "LastRenovationDate,interval:year",
        "Rooms/BaseRate,sort:value",
        "Tags,sort:value");
    return options;
}
/** Asserts that the ordered "HotelId" values of {@code actual} equal {@code expected}. */
void assertListEqualHotelIds(List<String> expected, List<SearchResult> actual) {
    assertNotNull(actual);
    List<String> actualKeys = new ArrayList<>();
    for (SearchResult hit : actual) {
        SearchDocument document = hit.getDocument(SearchDocument.class);
        if (document.containsKey("HotelId")) {
            actualKeys.add((String) document.get("HotelId"));
        }
    }
    assertEquals(expected, actualKeys);
}
// Creates (and registers for setup) an index whose fields are all non-nullable
// value types, plus a complex field and a complex collection; returns its name.
String createIndexWithNonNullableTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("non-nullable-index", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setHidden(false)
.setKey(true),
new SearchField("Rating", SearchFieldDataType.INT32)
.setHidden(false),
new SearchField("Count", SearchFieldDataType.INT64)
.setHidden(false),
new SearchField("IsEnabled", SearchFieldDataType.BOOLEAN)
.setHidden(false),
new SearchField("Ratio", SearchFieldDataType.DOUBLE)
.setHidden(false),
new SearchField("StartDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
new SearchField("EndDate", SearchFieldDataType.DATE_TIME_OFFSET)
.setHidden(false),
// Single complex sub-object with filterable sub-fields.
new SearchField("TopLevelBucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true))),
// Collection of the same complex shape.
new SearchField("Buckets", SearchFieldDataType.collection(SearchFieldDataType.COMPLEX))
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)))));
setupIndex(index);
return index.getName();
}
// Creates an index with a searchable key, a filterable int field, and a complex
// "Bucket" field with filterable sub-fields; returns the generated index name.
String createIndexWithValueTypes() {
SearchIndex index = new SearchIndex(testResourceNamer.randomName("testindex", 64))
.setFields(Arrays.asList(
new SearchField("Key", SearchFieldDataType.STRING)
.setKey(true)
.setSearchable(true),
new SearchField("IntValue", SearchFieldDataType.INT32)
.setFilterable(true),
new SearchField("Bucket", SearchFieldDataType.COMPLEX)
.setFields(Arrays.asList(
new SearchField("BucketName", SearchFieldDataType.STRING)
.setFilterable(true),
new SearchField("Count", SearchFieldDataType.INT32)
.setFilterable(true)
))
)
);
setupIndex(index);
return index.getName();
}
/** Creates the three fixture documents used by the value-types tests. */
List<Map<String, Object>> createDocsListWithValueTypes() {
    return Arrays.asList(
        createValueTypeDoc("132", 0, "A", 3),
        createValueTypeDoc("456", 7, "B", 5),
        createValueTypeDoc("789", 1, "B", 99));
}

/** Builds one document with a nested "Bucket" complex field. */
private static Map<String, Object> createValueTypeDoc(String key, int intValue, String bucketName, int count) {
    Map<String, Object> bucket = new HashMap<>();
    bucket.put("BucketName", bucketName);
    bucket.put("Count", count);
    Map<String, Object> doc = new HashMap<>();
    doc.put("Key", key);
    doc.put("IntValue", intValue);
    doc.put("Bucket", bucket);
    return doc;
}
} | class SearchSyncTests extends SearchTestBase {
// Indexes created by individual tests; deleted in afterTest().
private final List<String> indexesToDelete = new ArrayList<>();
// Name of the synonym map created by canSearchWithSynonyms; empty = nothing to delete.
private String synonymMapToDelete = "";
// Shared, pre-populated index reused by the read-only tests.
private static final String INDEX_NAME = "azsearch-search-shared-instance";
private static SearchIndexClient searchIndexClient;
private SearchClient client;
// Creates the shared index once per class. Skipped in PLAYBACK mode, where the
// recorded sessions already contain the index interactions.
@BeforeAll
public static void setupClass() {
TestBase.setupClass();
if (TEST_MODE == TestMode.PLAYBACK) {
return;
}
searchIndexClient = setupSharedIndex(INDEX_NAME);
}
// Per-test cleanup: deletes any indexes and the synonym map a test created.
@Override
protected void afterTest() {
super.afterTest();
SearchIndexClient serviceClient = getSearchIndexClientBuilder().buildClient();
for (String index : indexesToDelete) {
serviceClient.deleteIndex(index);
}
if (!CoreUtils.isNullOrEmpty(synonymMapToDelete)) {
serviceClient.deleteSynonymMap(synonymMapToDelete);
// Allow the deletion to propagate before the next test starts.
sleepIfRunningAgainstService(5000);
}
}
// Deletes the shared index after all tests; nothing to clean up in PLAYBACK mode.
@AfterAll
protected static void cleanupClass() {
if (TEST_MODE != TestMode.PLAYBACK) {
searchIndexClient.deleteIndex(INDEX_NAME);
}
}
/**
 * Creates an index via {@code indexSupplier}, registers it for cleanup in
 * afterTest(), and returns a client bound to it.
 */
private SearchClient setupClient(Supplier<String> indexSupplier) {
    String createdIndex = indexSupplier.get();
    indexesToDelete.add(createdIndex);
    return getSearchClientBuilder(createdIndex).buildClient();
}
// Verifies that an invalid $filter expression surfaces as an HTTP 400 with the
// service's parse-error message.
@Test
public void searchThrowsWhenRequestIsMalformed() {
SearchOptions invalidSearchOptions = new SearchOptions().setFilter("This is not a valid filter.");
assertHttpResponseException(
() -> search("*", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Invalid expression: Syntax error at position 7 in 'This is not a valid filter.'");
}
// Verifies that an unescaped "/" inside a full-Lucene regex query is rejected
// with an HTTP 400 query-parse error.
@Test
public void searchThrowsWhenSpecialCharInRegexIsUnescaped() {
SearchOptions invalidSearchOptions = new SearchOptions().setQueryType(QueryType.FULL);
assertHttpResponseException(
() -> search("/.*/.*/", invalidSearchOptions),
HttpURLConnection.HTTP_BAD_REQUEST,
"Failed to parse query string at line 1, column 8.");
}
// Runs a search against the shared index and forces retrieval of the first page
// so that service-side errors are raised eagerly.
private void search(String searchText, SearchOptions searchOptions) {
getSearchClientBuilder(INDEX_NAME).buildClient().search(searchText, searchOptions, Context.NONE)
.iterableByPage()
.iterator()
.next();
}
// Verifies that search results can be consumed as dynamic SearchDocument maps
// and match the uploaded documents.
@Test
public void canSearchDynamicDocuments() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
SearchPagedIterable searchResults = client.search("*");
assertNotNull(searchResults);
// Count/coverage/facets were not requested, so they must be absent.
assertNull(searchResults.getTotalCount());
assertNull(searchResults.getCoverage());
assertNull(searchResults.getFacets());
Iterator<SearchPagedResponse> iterator = searchResults.iterableByPage().iterator();
List<Map<String, Object>> actualResults = new ArrayList<>(hotels.size());
while (iterator.hasNext()) {
SearchPagedResponse result = iterator.next();
assertNotNull(result.getValue());
result.getElements().forEach(item -> {
// Match-all queries score every document 1 and produce no highlights.
assertEquals(1, item.getScore(), 0);
assertNull(item.getHighlights());
actualResults.add(item.getDocument(SearchDocument.class));
});
}
assertEquals(hotels.size(), actualResults.size());
// Sort by numeric id so compareResults sees both lists in the same order.
actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.get("HotelId").toString())));
assertTrue(compareResults(actualResults, hotels));
}
// Verifies server-side paging with the default page size: 100 documents split
// into two 50-document pages, with a continuation token only on the first.
@Test
public void canContinueSearch() {
client = setupClient(this::createHotelIndex);
List<Map<String, Object>> hotels = createHotelsList(100);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions().setSelect("HotelId")
.setOrderBy("HotelId asc");
// Ids sorted lexicographically as strings, matching "HotelId asc" on the service.
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(50, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 50), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(50, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(50, 100), secondPage.getValue());
// Last page carries no continuation token.
assertNull(secondPage.getContinuationToken());
}
// Verifies continuation when the requested top (2000) exceeds the service's
// 1000-results-per-page limit: two pages of 1000 documents come back, and only
// the first page carries a continuation token.
@Test
public void canContinueSearchWithTop() {
client = setupClient(this::createHotelIndex);
// 3000 documents uploaded, but only 2000 requested via setTop.
List<Map<String, Object>> hotels = createHotelsList(3000);
uploadDocuments(client, hotels);
SearchOptions searchOptions = new SearchOptions()
.setTop(2000)
.setSelect("HotelId")
.setOrderBy("HotelId asc");
// Ids sorted lexicographically as strings, matching "HotelId asc" on the service.
List<String> expectedHotelIds = hotels.stream().map(hotel -> (String) hotel.get("HotelId")).sorted()
.collect(Collectors.toList());
SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
assertNotNull(results);
Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
SearchPagedResponse firstPage = iterator.next();
assertEquals(1000, firstPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(0, 1000), firstPage.getValue());
assertNotNull(firstPage.getContinuationToken());
SearchPagedResponse secondPage = iterator.next();
assertEquals(1000, secondPage.getValue().size());
assertListEqualHotelIds(expectedHotelIds.subList(1000, 2000), secondPage.getValue());
// The final page of the requested 2000 documents has no continuation token.
assertNull(secondPage.getContinuationToken());
}
@Test
public void canSearchStaticallyTypedDocuments() {
    // Uploads raw map documents, searches with "*", and verifies every result deserializes
    // into the strongly typed Hotel model and matches what was uploaded.
    client = setupClient(this::createHotelIndex);
    List<Map<String, Object>> hotels = uploadDocumentsJson(client, HOTELS_DATA_JSON_WITHOUT_FR_DESCRIPTION);
    SearchPagedIterable results = client.search("*", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    // Count, coverage, and facets were not requested, so none should be populated.
    assertNull(results.getTotalCount());
    assertNull(results.getCoverage());
    assertNull(results.getFacets());
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    List<Hotel> actualResults = new ArrayList<>(hotels.size());
    while (iterator.hasNext()) {
        SearchPagedResponse result = iterator.next();
        assertNotNull(result.getValue());
        result.getElements().forEach(item -> {
            // A match-all query gives every document the neutral score of 1 and no highlights.
            assertEquals(1, item.getScore(), 0);
            assertNull(item.getHighlights());
            Hotel hotel = item.getDocument(Hotel.class);
            actualResults.add(hotel);
        });
    }
    List<Hotel> hotelsList = hotels.stream()
        .map(hotel -> convertMapToValue(hotel, Hotel.class))
        .collect(Collectors.toList());
    assertEquals(hotelsList.size(), actualResults.size());
    // Sort by numeric hotel id so the element-wise comparison is order-independent.
    actualResults.sort(Comparator.comparing(doc -> Integer.parseInt(doc.hotelId())));
    for (int i = 0; i < hotelsList.size(); i++) {
        assertObjectEquals(hotelsList.get(i), actualResults.get(i), true, "properties");
    }
}
// Fixed: '@SuppressWarnings' and '@Test' each appeared twice on this method, which does not
// compile (neither annotation is repeatable).
@SuppressWarnings("UseOfObsoleteDateTimeApi")
@Test
public void canSearchWithDateInStaticModel() {
    // Verifies that a DateTimeOffset index field round-trips into the legacy java.util.Date
    // field of the statically typed Hotel model with millisecond precision.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    OffsetDateTime expected = OffsetDateTime.parse("2010-06-27T00:00:00Z");
    SearchPagedIterable results = client.search("Fancy", new SearchOptions(), Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    assertTrue(iterator.hasNext());
    SearchPagedResponse result = iterator.next();
    assertEquals(1, result.getValue().size());
    Date actual = result.getValue().get(0).getDocument(Hotel.class).lastRenovationDate();
    long epochMilli = expected.toInstant().toEpochMilli();
    assertEquals(new Date(epochMilli), actual);
}
@Test
public void canSearchWithSelectedFields() {
    // Verifies $select projection: only the requested top-level and nested fields come back,
    // and unselected complex fields are null/empty in the returned documents.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions sp = new SearchOptions();
    sp.setSearchFields("HotelName", "Category");
    sp.setSelect("HotelName", "Rating", "Address/City", "Rooms/Type");
    SearchPagedIterable results = client.search("fancy luxury secret", sp, Context.NONE);
    // Hotel 1 has no Address/Rooms values, so the projected fields are null/empty.
    HashMap<String, Object> expectedHotel1 = new HashMap<>();
    expectedHotel1.put("HotelName", "Fancy Stay");
    expectedHotel1.put("Rating", 5);
    expectedHotel1.put("Address", null);
    expectedHotel1.put("Rooms", Collections.emptyList());
    // Hotel 2 has a nested address and two rooms; only City and Type are projected.
    HashMap<String, Object> expectedHotel2 = new HashMap<>();
    expectedHotel2.put("HotelName", "Secret Point Motel");
    expectedHotel2.put("Rating", 4);
    HashMap<String, Object> address = new HashMap<>();
    address.put("City", "New York");
    expectedHotel2.put("Address", address);
    HashMap<String, Object> rooms = new HashMap<>();
    rooms.put("Type", "Budget Room");
    HashMap<String, Object> rooms2 = new HashMap<>();
    rooms2.put("Type", "Budget Room");
    expectedHotel2.put("Rooms", Arrays.asList(rooms, rooms2));
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    SearchPagedResponse result = iterator.next();
    assertEquals(2, result.getValue().size());
    Map<String, Object> hotel1 = extractAndTransformSingleResult(result.getValue().get(0));
    Map<String, Object> hotel2 = extractAndTransformSingleResult(result.getValue().get(1));
    assertMapEquals(expectedHotel1, hotel1, true);
    assertMapEquals(expectedHotel2, hotel2, true);
}
@Test
public void canUseTopAndSkipForClientSidePaging() {
    // Client-side paging via $top/$skip: two successive windows of three documents,
    // ordered by the string field HotelId ("1", "10", "2", ...).
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions parameters = new SearchOptions().setTop(3).setSkip(0).setOrderBy("HotelId");
    SearchPagedIterable results = client.search("*", parameters, Context.NONE);
    assertKeySequenceEqual(results, Arrays.asList("1", "10", "2"));
    parameters.setSkip(3);
    results = client.search("*", parameters, Context.NONE);
    assertKeySequenceEqual(results, Arrays.asList("3", "4", "5"));
}
@Test
public void searchWithoutOrderBySortsByScore() {
    // Without an explicit orderBy the service sorts by relevance score; adjacent results
    // should therefore be in non-increasing score order (checked pairwise here).
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    Iterator<SearchResult> results = client
        .search("*", new SearchOptions().setFilter("Rating lt 4"), Context.NONE).iterator();
    SearchResult firstResult = results.next();
    SearchResult secondResult = results.next();
    assertTrue(firstResult.getScore() <= secondResult.getScore());
}
@Test
public void orderByProgressivelyBreaksTies() {
    // Documents tied on 'Rating desc' are ordered by 'LastRenovationDate asc', and any
    // remaining ties by 'HotelId'.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    String[] expectedResults = new String[]{"1", "9", "3", "4", "5", "10", "2", "6", "7", "8"};
    Stream<String> results = client
        .search("*", new SearchOptions().setOrderBy("Rating desc", "LastRenovationDate asc", "HotelId"),
            Context.NONE).stream()
        .map(this::getSearchResultId);
    // Fixed: assertArrayEquals takes (expected, actual); the arguments were swapped, which
    // produced a misleading failure message on mismatch.
    assertArrayEquals(expectedResults, results.toArray());
}
@Test
public void canFilter() {
    // An OData $filter combining a numeric and a date comparison should match exactly
    // hotels 1 and 5 in the shared test index.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setFilter("Rating gt 3 and LastRenovationDate gt 2000-01-01T00:00:00Z")
        .setOrderBy("HotelId asc");
    SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> searchResultsList = getSearchResults(results);
    assertEquals(2, searchResultsList.size());
    assertEquals("1", searchResultsList.get(0).get("HotelId").toString());
    assertEquals("5", searchResultsList.get(1).get("HotelId").toString());
}
@Test
public void canSearchWithRangeFacets() {
    // Requests interval (range) facets on Rooms/BaseRate and LastRenovationDate and checks
    // both the facet buckets and that every returned document is a known hotel.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
    SearchPagedIterable results = client.search("*", getSearchOptionsForRangeFacets(),
        Context.NONE);
    assertNotNull(results.getFacets());
    List<RangeFacetResult<String>> baseRateFacets = getRangeFacetsForField(results.getFacets(),
        "Rooms/BaseRate", 4);
    List<RangeFacetResult<String>> lastRenovationDateFacets = getRangeFacetsForField(
        results.getFacets(), "LastRenovationDate", 2);
    assertRangeFacets(baseRateFacets, lastRenovationDateFacets);
    for (SearchPagedResponse result : results.iterableByPage()) {
        assertContainHotelIds(hotels, result.getValue());
    }
}
@Test
public void canSearchWithValueFacets() {
    // Requests value facets over several field types (numeric, boolean, string, date,
    // nested, and collection) and checks the exact bucket counts for each.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
    SearchPagedIterable results = client.search("*", getSearchOptionsForValueFacets(),
        Context.NONE);
    Map<String, List<FacetResult>> facets = results.getFacets();
    assertNotNull(facets);
    for (SearchPagedResponse result : results.iterableByPage()) {
        assertContainHotelIds(hotels, result.getValue());
        // Rating: count:2,sort:-value keeps only the two highest rating values.
        assertValueFacetsEqual(
            getValueFacetsForField(facets, "Rating", 2),
            new ArrayList<>(Arrays.asList(
                new ValueFacetResult<>(1L, 5),
                new ValueFacetResult<>(4L, 4))));
        // SmokingAllowed: sort:count orders buckets by ascending document count.
        assertValueFacetsEqual(
            getValueFacetsForField(facets, "SmokingAllowed", 2),
            new ArrayList<>(Arrays.asList(
                new ValueFacetResult<>(4L, false),
                new ValueFacetResult<>(2L, true))));
        assertValueFacetsEqual(
            getValueFacetsForField(facets, "Category", 3),
            new ArrayList<>(Arrays.asList(
                new ValueFacetResult<>(5L, "Budget"),
                new ValueFacetResult<>(1L, "Boutique"),
                new ValueFacetResult<>(1L, "Luxury"))));
        // LastRenovationDate: interval:year buckets; expected values are ISO offset strings.
        assertValueFacetsEqual(getValueFacetsForField(facets, "LastRenovationDate", 6),
            new ArrayList<>(Arrays.asList(new ValueFacetResult<>(1L, OffsetDateTime.parse("1970-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
                new ValueFacetResult<>(1L, OffsetDateTime.parse("1982-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
                new ValueFacetResult<>(2L, OffsetDateTime.parse("1995-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
                new ValueFacetResult<>(1L, OffsetDateTime.parse("1999-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
                new ValueFacetResult<>(1L, OffsetDateTime.parse("2010-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)),
                new ValueFacetResult<>(1L, OffsetDateTime.parse("2012-01-01T00:00:00Z").format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)))));
        assertValueFacetsEqual(
            getValueFacetsForField(facets, "Rooms/BaseRate", 4),
            new ArrayList<>(Arrays.asList(
                new ValueFacetResult<>(1L, 2.44),
                new ValueFacetResult<>(1L, 7.69),
                new ValueFacetResult<>(1L, 8.09),
                new ValueFacetResult<>(1L, 9.69))));
        // Tags: a collection field; each distinct tag becomes its own bucket.
        assertValueFacetsEqual(
            getValueFacetsForField(facets, "Tags", 10),
            new ArrayList<>(Arrays.asList(
                new ValueFacetResult<>(1L, "24-hour front desk service"),
                new ValueFacetResult<>(1L, "air conditioning"),
                new ValueFacetResult<>(4L, "budget"),
                new ValueFacetResult<>(1L, "coffee in lobby"),
                new ValueFacetResult<>(2L, "concierge"),
                new ValueFacetResult<>(1L, "motel"),
                new ValueFacetResult<>(2L, "pool"),
                new ValueFacetResult<>(1L, "restaurant"),
                new ValueFacetResult<>(1L, "view"),
                new ValueFacetResult<>(4L, "wifi"))));
    }
}
@Test
public void canSearchWithLuceneSyntax() {
    // QueryType.FULL enables Lucene syntax; the fuzzy term "roch~" should match "Roach Motel".
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    Map<String, Object> expectedResult = new HashMap<>();
    expectedResult.put("HotelName", "Roach Motel");
    expectedResult.put("Rating", 1);
    SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL).setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("HotelName:roch~", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> searchResultsList = getSearchResults(results);
    assertEquals(1, searchResultsList.size());
    assertEquals(expectedResult, searchResultsList.get(0));
}
@Test
public void canFilterNonNullableType() {
    // Filters over non-nullable value-type fields (int, nested complex) in a purpose-built
    // index; document "789" is excluded by the filter.
    client = setupClient(this::createIndexWithValueTypes);
    List<Map<String, Object>> docsList = createDocsListWithValueTypes();
    uploadDocuments(client, docsList);
    List<Map<String, Object>> expectedDocsList =
        docsList.stream()
            .filter(d -> !d.get("Key").equals("789"))
            .collect(Collectors.toList());
    SearchOptions searchOptions = new SearchOptions()
        .setFilter("IntValue eq 0 or (Bucket/BucketName eq 'B' and Bucket/Count lt 10)");
    SearchPagedIterable results = client.search("*", searchOptions, Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> searchResultsList = getSearchResults(results);
    assertEquals(2, searchResultsList.size());
    assertTrue(searchResultsList.containsAll(expectedDocsList));
}
@Test
public void canSearchWithSearchModeAll() {
    // SearchMode.ALL requires every term to match, narrowing "Cheapest hotel" to one result.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> response = getSearchResults(client
        .search("Cheapest hotel", new SearchOptions().setQueryType(QueryType.SIMPLE).setSearchMode(SearchMode.ALL),
            Context.NONE));
    assertEquals(1, response.size());
    assertEquals("2", response.get(0).get("HotelId"));
}
@Test
public void defaultSearchModeIsAny() {
    // Without an explicit search mode any term may match, so "Cheapest hotel" returns the
    // broader seven-document result set.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> response = getSearchResults(client.search("Cheapest hotel",
        new SearchOptions().setOrderBy("HotelId"), Context.NONE));
    assertEquals(7, response.size());
    assertEquals(
        Arrays.asList("1", "10", "2", "3", "4", "5", "9"),
        response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
@Test
public void canGetResultCountInSearch() {
    // setIncludeTotalCount(true) populates the total count, which should equal the number of
    // documents in the test data set; all results fit in a single page here.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    List<Map<String, Object>> hotels = readJsonFileToList(HOTELS_DATA_JSON);
    SearchPagedIterable results = client.search("*", new SearchOptions().setIncludeTotalCount(true),
        Context.NONE);
    assertNotNull(results);
    assertEquals(hotels.size(), results.getTotalCount().intValue());
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    assertNotNull(iterator.next());
    assertFalse(iterator.hasNext());
}
@Test
public void canSearchWithRegex() {
    // A Lucene regex query over HotelName should match only "Roach Motel".
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("HotelName:/.*oach.*\\/?/", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> resultsList = getSearchResults(results);
    Map<String, Object> expectedHotel = new HashMap<>();
    expectedHotel.put("HotelName", "Roach Motel");
    expectedHotel.put("Rating", 1);
    assertEquals(1, resultsList.size());
    // Fixed: assertEquals takes (expected, actual); the arguments were swapped, which
    // produced a misleading failure message on mismatch.
    assertEquals(expectedHotel, resultsList.get(0));
}
@Test
public void canSearchWithEscapedSpecialCharsInRegex() {
    // All Lucene special characters, escaped; the query is valid but matches nothing.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions().setQueryType(QueryType.FULL);
    SearchPagedIterable results = client.search("\\+\\-\\&\\|\\!\\(\\)\\{\\}\\[\\]\\^\\~\\*\\?\\:", searchOptions,
        Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> resultsList = getSearchResults(results);
    assertEquals(0, resultsList.size());
}
@Test
public void searchWithScoringProfileBoostsScore() {
    // The "nearest" scoring profile with a geo parameter should boost hotel 2 above hotel 1.
    client = setupClient(this::createHotelIndex);
    uploadDocumentsJson(client, HOTELS_DATA_JSON);
    SearchOptions searchOptions = new SearchOptions()
        .setScoringProfile("nearest")
        .setScoringParameters(new ScoringParameter("myloc", new GeoPoint(-122.0, 49.0)))
        .setFilter("Rating eq 5 or Rating eq 1")
        .setOrderBy("HotelId desc");
    List<Map<String, Object>> response = getSearchResults(client.search("hotel", searchOptions, Context.NONE));
    assertEquals(2, response.size());
    assertEquals("2", response.get(0).get("HotelId").toString());
    assertEquals("1", response.get(1).get("HotelId").toString());
}
@Test
public void searchWithScoringProfileEscaper() {
    // Scoring parameter values containing commas/quotes must be escaped correctly on the wire.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setScoringProfile("text")
        .setScoringParameters(new ScoringParameter("mytag", Arrays.asList("concierge", "Hello, O''Brien")))
        .setFilter("Rating eq 5 or Rating eq 1");
    List<Map<String, Object>> response = getSearchResults(client.search("hotel",
        searchOptions, Context.NONE));
    assertEquals(2, response.size());
    assertEquals(
        Arrays.asList("1", "2"),
        response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
@Test
public void searchWithScoringParametersEmpty() {
    // An empty string among the scoring parameter values must not break the request.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchOptions searchOptions = new SearchOptions()
        .setScoringProfile("text")
        .setScoringParameters(new ScoringParameter("mytag", Arrays.asList("", "concierge")))
        .setFilter("Rating eq 5 or Rating eq 1");
    List<Map<String, Object>> response = getSearchResults(client.search("hotel",
        searchOptions, Context.NONE));
    assertEquals(2, response.size());
    assertEquals(
        Arrays.asList("1", "2"),
        response.stream().map(res -> res.get("HotelId").toString()).collect(Collectors.toList()));
}
@Test
public void canSearchWithMinimumCoverage() {
    // Requesting a minimum coverage makes the service report coverage; a single-partition
    // test index always reports 100%.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    SearchPagedIterable results = client.search("*", new SearchOptions().setMinimumCoverage(50.0),
        Context.NONE);
    assertNotNull(results);
    assertEquals(100.0, results.getCoverage(), 0);
}
@Test
public void canUseHitHighlighting() {
    // Requests highlights on Category and Description with custom pre/post tags and checks
    // the exact highlighted fragments for the single matching document.
    client = getSearchClientBuilder(INDEX_NAME).buildClient();
    String description = "Description";
    String category = "Category";
    SearchOptions sp = new SearchOptions();
    sp.setFilter("Rating eq 5");
    sp.setHighlightPreTag("<b>");
    sp.setHighlightPostTag("</b>");
    sp.setHighlightFields(category, description);
    SearchPagedIterable results = client.search("luxury hotel", sp, Context.NONE);
    assertNotNull(results);
    Iterator<SearchPagedResponse> iterator = results.iterableByPage().iterator();
    SearchPagedResponse result = iterator.next();
    List<SearchResult> documents = result.getValue();
    assertEquals(1, documents.size());
    Map<String, List<String>> highlights = documents.get(0).getHighlights();
    assertNotNull(highlights);
    // Exactly the two requested fields should carry highlights.
    assertEquals(2, highlights.keySet().size());
    assertTrue(highlights.containsKey(description));
    assertTrue(highlights.containsKey(category));
    String categoryHighlight = highlights.get(category).get(0);
    assertEquals("<b>Luxury</b>", categoryHighlight);
    List<String> expectedDescriptionHighlights =
        Arrays.asList(
            "Best <b>hotel</b> in town if you like <b>luxury</b> <b>hotels</b>.",
            "We highly recommend this <b>hotel</b>."
        );
    assertEquals(expectedDescriptionHighlights, highlights.get(description));
}
@Test
public void canSearchWithSynonyms() {
    // Attaches a synonym map (luxury <-> fancy) to HotelName and verifies that searching
    // for "luxury" now matches "Fancy Stay".
    client = setupClient(this::createHotelIndex);
    uploadDocumentsJson(client, HOTELS_DATA_JSON);
    String fieldName = "HotelName";
    SearchIndexClient searchIndexClient = getSearchIndexClientBuilder().buildClient();
    synonymMapToDelete = searchIndexClient.createSynonymMap(new SynonymMap(
        testResourceNamer.randomName("names", 32))
        .setSynonyms("luxury,fancy")).getName();
    SearchIndex hotelsIndex = searchIndexClient.getIndex(client.getIndexName());
    // Fixed: replaced the unchecked Optional.get() with orElseThrow so a missing field
    // fails with a descriptive message instead of a bare NoSuchElementException.
    hotelsIndex.getFields().stream()
        .filter(f -> fieldName.equals(f.getName()))
        .findFirst()
        .orElseThrow(() -> new IllegalStateException("Field '" + fieldName + "' not found in index."))
        .setSynonymMapNames(synonymMapToDelete);
    searchIndexClient.createOrUpdateIndex(hotelsIndex);
    // Give the service time to apply the synonym map before querying.
    sleepIfRunningAgainstService(10000);
    SearchOptions searchOptions = new SearchOptions()
        .setQueryType(QueryType.FULL)
        .setSearchFields(fieldName)
        .setSelect("HotelName", "Rating");
    SearchPagedIterable results = client.search("luxury", searchOptions, Context.NONE);
    assertNotNull(results);
    List<Map<String, Object>> response = getSearchResults(results);
    assertEquals(1, response.size());
    assertEquals("Fancy Stay", response.get(0).get("HotelName"));
    assertEquals(5, response.get(0).get("Rating"));
}
private List<Map<String, Object>> getSearchResults(SearchPagedIterable results) {
    // Flattens every page of the paged response into a single list of raw documents.
    List<Map<String, Object>> documents = new ArrayList<>();
    for (SearchPagedResponse page : results.iterableByPage()) {
        assertNotNull(page.getValue());
        page.getElements().forEach(element -> documents.add(element.getDocument(SearchDocument.class)));
    }
    return documents;
}
private Map<String, Object> extractAndTransformSingleResult(SearchResult result) {
    // Normalizes a single result's document into plain Map/List instances.
    SearchDocument document = result.getDocument(SearchDocument.class);
    return convertHashMapToMap(document);
}
/**
 * Recursively converts a map-based document into plain {@link Map} instances, converting
 * nested maps and lists along the way.
 *
 * @param mapObject object to convert; expected to be a {@link Map} with {@link String} keys
 * @return {@link Map}{@code <}{@link String}{@code ,}{@link Object}{@code >}
 */
@SuppressWarnings("unchecked")
private static Map<String, Object> convertHashMapToMap(Object mapObject) {
    // Generalized: the cast and the nested-value check now accept any Map implementation
    // instead of HashMap only; behavior for HashMap inputs is unchanged.
    Map<String, Object> map = (Map<String, Object>) mapObject;
    Map<String, Object> convertedMap = new HashMap<>();
    for (Map.Entry<String, Object> entry : map.entrySet()) {
        Object value = entry.getValue();
        if (value instanceof Map) {
            value = convertHashMapToMap(value);
        }
        // convertArray's signature requires ArrayList specifically, so keep that check narrow.
        if (value instanceof ArrayList) {
            value = convertArray((ArrayList<Object>) value);
        }
        convertedMap.put(entry.getKey(), value);
    }
    return convertedMap;
}
/**
 * Converts the elements of a list, replacing any nested {@link HashMap} element with its
 * plain-{@link Map} equivalent.
 *
 * @param array which elements will be converted
 * @return {@link ArrayList}{@code <}{@link Object}{@code >}
 */
private static ArrayList<Object> convertArray(ArrayList<Object> array) {
    ArrayList<Object> converted = new ArrayList<>(array.size());
    for (Object element : array) {
        converted.add(element instanceof HashMap ? convertHashMapToMap(element) : element);
    }
    return converted;
}
private void assertKeySequenceEqual(SearchPagedIterable results, List<String> expectedKeys) {
    // Asserts the results' "HotelId" values appear exactly in the expected order.
    assertNotNull(results);
    // Improved: each result is deserialized once; the original called getDocument twice
    // per result (once in the filter and again in the map).
    List<String> actualKeys = results.stream()
        .map(doc -> doc.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toList());
    assertEquals(expectedKeys, actualKeys);
}
List<Map<String, Object>> createHotelsList(int count) {
    // Generates 'count' synthetic hotel documents with predictable, id-derived field values.
    List<Map<String, Object>> hotels = new ArrayList<>(count);
    for (int id = 1; id <= count; id++) {
        Map<String, Object> hotel = new HashMap<>();
        hotel.put("HotelId", String.valueOf(id));
        hotel.put("HotelName", "Hotel" + id);
        hotel.put("Description", "Desc" + id);
        hotel.put("Description_fr", "Desc_fr" + id);
        hotel.put("Category", "Catg" + id);
        hotel.put("Tags", Collections.singletonList("tag" + id));
        hotel.put("ParkingIncluded", false);
        hotel.put("SmokingAllowed", false);
        hotel.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T00:00:00Z"));
        hotel.put("Rating", id);
        hotels.add(hotel);
    }
    return hotels;
}
boolean compareResults(List<Map<String, Object>> searchResults, List<Map<String, Object>> hotels) {
    // Fixed: previously only the overlapping prefix of the two lists was compared, so extra
    // or missing results passed silently. Assert equal sizes up front.
    assertEquals(hotels.size(), searchResults.size());
    Iterator<Map<String, Object>> searchIterator = searchResults.iterator();
    Iterator<Map<String, Object>> hotelsIterator = hotels.iterator();
    while (searchIterator.hasNext() && hotelsIterator.hasNext()) {
        // assertMapEquals throws on mismatch, so reaching the end means the lists agree.
        assertMapEquals(hotelsIterator.next(), searchIterator.next(), true, "properties");
    }
    // Kept for callers that wrap this in assertTrue(...).
    return true;
}
<T> void assertRangeFacets(List<RangeFacetResult<T>> baseRateFacets, List<RangeFacetResult<T>> lastRenovationDateFacets) {
    // BaseRate buckets: (-inf, 5), [5, 8), [8, 10), [10, +inf) — open ends have null from/to.
    assertNull(baseRateFacets.get(0).getFrom());
    assertEquals(5.0, baseRateFacets.get(0).getTo());
    assertEquals(5.0, baseRateFacets.get(1).getFrom());
    assertEquals(8.0, baseRateFacets.get(1).getTo());
    assertEquals(8.0, baseRateFacets.get(2).getFrom());
    assertEquals(10.0, baseRateFacets.get(2).getTo());
    assertEquals(10.0, baseRateFacets.get(3).getFrom());
    assertNull(baseRateFacets.get(3).getTo());
    // Expected document counts per BaseRate bucket.
    assertEquals(1, baseRateFacets.get(0).getCount().intValue());
    assertEquals(1, baseRateFacets.get(1).getCount().intValue());
    assertEquals(1, baseRateFacets.get(2).getCount().intValue());
    assertEquals(0, baseRateFacets.get(3).getCount().intValue());
    // LastRenovationDate buckets split at 2000-01-01 (string-valued boundaries).
    assertNull(lastRenovationDateFacets.get(0).getFrom());
    assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(0).getTo());
    assertEquals("2000-01-01T00:00:00.000+0000", lastRenovationDateFacets.get(1).getFrom());
    assertNull(lastRenovationDateFacets.get(1).getTo());
    assertEquals(5, lastRenovationDateFacets.get(0).getCount().intValue());
    assertEquals(2, lastRenovationDateFacets.get(1).getCount().intValue());
}
<T> List<RangeFacetResult<T>> getRangeFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    // Wraps each raw facet for the field in a typed RangeFacetResult.
    return getFacetsForField(facets, expectedField, expectedCount).stream()
        .map(RangeFacetResult<T>::new)
        .collect(Collectors.toList());
}
<T> List<ValueFacetResult<T>> getValueFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    // Wraps each raw facet for the field in a typed ValueFacetResult.
    return getFacetsForField(facets, expectedField, expectedCount).stream()
        .map(ValueFacetResult<T>::new)
        .collect(Collectors.toList());
}
private List<FacetResult> getFacetsForField(
    Map<String, List<FacetResult>> facets, String expectedField, int expectedCount) {
    // Asserts the facet map contains the field with exactly the expected number of buckets.
    assertTrue(facets.containsKey(expectedField));
    List<FacetResult> fieldFacets = facets.get(expectedField);
    assertEquals(expectedCount, fieldFacets.size());
    return fieldFacets;
}
void assertContainHotelIds(List<Map<String, Object>> expected, List<SearchResult> actual) {
    // Compares the sets of "HotelId" values (order-insensitive, duplicates collapsed).
    assertNotNull(actual);
    // Improved: each result is deserialized once; the original called getDocument twice
    // per result (once in the filter and again in the map).
    Set<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toSet());
    Set<String> expectedKeys = expected.stream().filter(item -> item.containsKey("HotelId"))
        .map(item -> (String) item.get("HotelId")).collect(Collectors.toSet());
    assertEquals(expectedKeys, actualKeys);
}
<T> void assertValueFacetsEqual(List<ValueFacetResult<T>> actualFacets, ArrayList<ValueFacetResult<T>> expectedFacets) {
    // Element-wise comparison of count and value for each facet bucket, in order.
    assertEquals(expectedFacets.size(), actualFacets.size());
    Iterator<ValueFacetResult<T>> expectedIterator = expectedFacets.iterator();
    Iterator<ValueFacetResult<T>> actualIterator = actualFacets.iterator();
    while (expectedIterator.hasNext() && actualIterator.hasNext()) {
        ValueFacetResult<T> expectedFacet = expectedIterator.next();
        ValueFacetResult<T> actualFacet = actualIterator.next();
        assertEquals(expectedFacet.getCount(), actualFacet.getCount());
        assertEquals(expectedFacet.getValue(), actualFacet.getValue());
    }
}
String getSearchResultId(SearchResult searchResult) {
    // Extracts the "HotelId" key from a result's raw document.
    Object hotelId = searchResult.getDocument(SearchDocument.class).get("HotelId");
    return hotelId.toString();
}
SearchOptions getSearchOptionsForRangeFacets() {
    // Range (interval) facets: BaseRate split at 5|8|10, LastRenovationDate split at 2000-01-01.
    return new SearchOptions().setFacets("Rooms/BaseRate,values:5|8|10",
        "LastRenovationDate,values:2000-01-01T00:00:00Z");
}
SearchOptions getSearchOptionsForValueFacets() {
    // Value facets over several field types, exercising count limits, sort orders, and
    // the per-year interval option.
    return new SearchOptions().setFacets("Rating,count:2,sort:-value",
        "SmokingAllowed,sort:count",
        "Category",
        "LastRenovationDate,interval:year",
        "Rooms/BaseRate,sort:value",
        "Tags,sort:value");
}
void assertListEqualHotelIds(List<String> expected, List<SearchResult> actual) {
    // Compares the ordered list of "HotelId" values against the expected sequence.
    assertNotNull(actual);
    // Improved: each result is deserialized once; the original called getDocument twice
    // per result (once in the filter and again in the map).
    List<String> actualKeys = actual.stream()
        .map(item -> item.getDocument(SearchDocument.class))
        .filter(document -> document.containsKey("HotelId"))
        .map(document -> (String) document.get("HotelId"))
        .collect(Collectors.toList());
    assertEquals(expected, actualKeys);
}
String createIndexWithNonNullableTypes() {
    // Builds an index whose fields are all non-nullable value types (plus complex fields),
    // creates it on the service, and returns its generated name.
    SearchIndex index = new SearchIndex(testResourceNamer.randomName("non-nullable-index", 64))
        .setFields(Arrays.asList(
            new SearchField("Key", SearchFieldDataType.STRING)
                .setHidden(false)
                .setKey(true),
            new SearchField("Rating", SearchFieldDataType.INT32)
                .setHidden(false),
            new SearchField("Count", SearchFieldDataType.INT64)
                .setHidden(false),
            new SearchField("IsEnabled", SearchFieldDataType.BOOLEAN)
                .setHidden(false),
            new SearchField("Ratio", SearchFieldDataType.DOUBLE)
                .setHidden(false),
            new SearchField("StartDate", SearchFieldDataType.DATE_TIME_OFFSET)
                .setHidden(false),
            new SearchField("EndDate", SearchFieldDataType.DATE_TIME_OFFSET)
                .setHidden(false),
            // Single complex field with filterable sub-fields.
            new SearchField("TopLevelBucket", SearchFieldDataType.COMPLEX)
                .setFields(Arrays.asList(
                    new SearchField("BucketName", SearchFieldDataType.STRING)
                        .setFilterable(true),
                    new SearchField("Count", SearchFieldDataType.INT32)
                        .setFilterable(true))),
            // Collection of complex values with the same sub-field shape.
            new SearchField("Buckets", SearchFieldDataType.collection(SearchFieldDataType.COMPLEX))
                .setFields(Arrays.asList(
                    new SearchField("BucketName", SearchFieldDataType.STRING)
                        .setFilterable(true),
                    new SearchField("Count", SearchFieldDataType.INT32)
                        .setFilterable(true)))));
    setupIndex(index);
    return index.getName();
}
String createIndexWithValueTypes() {
    // Builds a small index with a searchable key, a filterable int, and a complex "Bucket"
    // field; creates it on the service and returns its generated name.
    SearchIndex index = new SearchIndex(testResourceNamer.randomName("testindex", 64))
        .setFields(Arrays.asList(
            new SearchField("Key", SearchFieldDataType.STRING)
                .setKey(true)
                .setSearchable(true),
            new SearchField("IntValue", SearchFieldDataType.INT32)
                .setFilterable(true),
            new SearchField("Bucket", SearchFieldDataType.COMPLEX)
                .setFields(Arrays.asList(
                    new SearchField("BucketName", SearchFieldDataType.STRING)
                        .setFilterable(true),
                    new SearchField("Count", SearchFieldDataType.INT32)
                        .setFilterable(true)
                ))
            )
        );
    setupIndex(index);
    return index.getName();
}
List<Map<String, Object>> createDocsListWithValueTypes() {
    // Three fixed documents matching the createIndexWithValueTypes schema.
    return Arrays.asList(
        createValueTypeDoc("132", 0, "A", 3),
        createValueTypeDoc("456", 7, "B", 5),
        createValueTypeDoc("789", 1, "B", 99));
}
// Builds one document with a nested "Bucket" complex value.
private static Map<String, Object> createValueTypeDoc(String key, int intValue, String bucketName, int count) {
    Map<String, Object> bucket = new HashMap<>();
    bucket.put("BucketName", bucketName);
    bucket.put("Count", count);
    Map<String, Object> document = new HashMap<>();
    document.put("Key", key);
    document.put("IntValue", intValue);
    document.put("Bucket", bucket);
    return document;
}
} |
The JavaDoc for this builder should be updated to make it clear that the client built from this can support sync and async sending. See https://github.com/Azure/azure-sdk-for-java/pull/32840#discussion_r1067516563 | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider. For more details see "
+ "https:
}
this.connectionProvider = connectionProvider;
return this;
} | + "with a specified ConnectionProvider. For more details see " | public NettyAsyncHttpClientBuilder connectionProvider(ConnectionProvider connectionProvider) {
if (connectionProvider != null) {
LOGGER.verbose("Setting ConnectionProvider for the Reactor Netty HttpClient. Please be aware of the "
+ "differences in runtime behavior when creating a default Reactor Netty HttpClient vs an HttpClient"
+ "with a specified ConnectionProvider. For more details see "
+ "https:
}
this.connectionProvider = connectionProvider;
return this;
} | class NettyAsyncHttpClientBuilder {
// Smallest timeout honored by the builder; shorter values are presumably clamped — TODO confirm
// against the timeout getters (not visible in this chunk).
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
// Effective defaults (millis), resolved once from the global configuration at class load.
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
    // Each default may be overridden through its AZURE_REQUEST_* configuration property;
    // otherwise the hard-coded fallback (10s connect, 60s write/response/read) is used.
    Configuration configuration = Configuration.getGlobalConfiguration();
    DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
        PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
    DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
        Duration.ofSeconds(60), LOGGER).toMillis();
    DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
        PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
    DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
        Duration.ofSeconds(60), LOGGER).toMillis();
}
// Optional pre-built Reactor Netty client to base the built client on; null when the
// no-argument constructor was used.
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
// Per-builder timeout overrides; null falls back to the DEFAULT_* values above.
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
 * Creates a new builder instance, where a builder is capable of generating multiple instances of {@link
 * com.azure.core.http.HttpClient} backed by Reactor Netty.
 */
public NettyAsyncHttpClientBuilder() {
    // No base client supplied; build() creates a fresh Reactor Netty HttpClient.
    this.baseHttpClient = null;
}
/**
 * Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
 * based on the provided Reactor Netty HttpClient.
 *
 * <p>NOTE(review): the embedded code sample here was garbled during extraction. It demonstrated
 * wrapping an existing {@code reactor.netty.http.client.HttpClient} (for example one created
 * with {@code HttpClient.create().wiretap(true)}) via
 * {@code new NettyAsyncHttpClientBuilder(baseHttpClient).eventLoopGroup(...).build()} —
 * restore the original src_embed snippet from the repository.
 *
 * @param nettyHttpClient base reactor netty HttpClient
 * @throws NullPointerException if {@code nettyHttpClient} is null
 */
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
    this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
* Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
* set in the builder at the time of the build method call.
*
* @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
* @throws IllegalStateException If the builder is configured to use an unknown proxy type.
*/
public com.azure.core.http.HttpClient build() {
HttpClient nettyHttpClient;
boolean addressResolverWasSetByBuilder = false;
if (this.baseHttpClient != null) {
nettyHttpClient = baseHttpClient;
} else if (this.connectionProvider != null) {
nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
} else {
nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
}
HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
nettyHttpClient = nettyHttpClient
.port(port)
.wiretap(enableWiretap)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
DEFAULT_CONNECT_TIMEOUT))
.httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = proxyOptions == null
? ProxyOptions.fromConfiguration(buildConfiguration, true)
: proxyOptions;
/*
* Only configure the custom authorization challenge handler and challenge holder when using an authenticated
* HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
*/
boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
AuthorizationChallengeHandler handler = useCustomProxyHandler
? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
: null;
AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
boolean addProxyHandler = false;
Pattern nonProxyHostsPattern = null;
if (eventLoopGroup != null) {
nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
}
if (buildProxyOptions != null) {
if (handler != null) {
addProxyHandler = true;
nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
? null
: Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
} else {
nettyHttpClient = nettyHttpClient.proxy(proxy ->
proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
.address(buildProxyOptions.getAddress())
.username(buildProxyOptions.getUsername())
.password(ignored -> buildProxyOptions.getPassword())
.nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
}
AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
if (resolver == null || addressResolverWasSetByBuilder) {
nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
}
}
return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
* Sets the connection provider.
*
* <p><strong>Code Sample</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
* <pre>
* &
* &
* &
* &
* ConnectionProvider connectionProvider = ConnectionProvider.builder&
* .maxConnections&
* .pendingAcquireMaxCount&
* .maxIdleTime&
* .build&
*
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .connectionProvider&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
*
* @param connectionProvider the connection provider
* @return the updated {@link NettyAsyncHttpClientBuilder} object.
*/
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
return this;
}
/**
* Sets the {@link ProxyOptions proxy options} that the client will use.
*
* @param proxyOptions The proxy configuration to use.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Enables the Netty wiretap feature.
*
* @param enableWiretap Flag indicating wiretap status
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
this.enableWiretap = enableWiretap;
return this;
}
/**
* Sets the port which this client should connect, which by default will be set to port 80.
*
* @param port The port to connect to.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder port(int port) {
this.port = port;
return this;
}
/**
* Sets the NIO event loop group that will be used to run IO loops.
*
* @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
* @deprecated deprecated in favor of {@link
*/
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
this.eventLoopGroup = nioEventLoopGroup;
return this;
}
/**
* Sets the IO event loop group that will be used to run IO loops.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder
* <pre>
* int threadCount = 5;
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder
*
* @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
this.eventLoopGroup = eventLoopGroup;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Disables deep copy of response {@link ByteBuffer} into a heap location that is managed by this client as opposed
* to the underlying netty library which may use direct buffer pool.
* <br>
* <b>
* Caution: Disabling this is not recommended as it can lead to data corruption if the downstream consumers of the
* response do not handle the byte buffers before netty releases them.
* </b>
* If copy is disabled, underlying Netty layer can potentially reclaim byte array backed by the {@code ByteBuffer}
* upon the return of {@code onNext()}. So, users should ensure they process the {@link ByteBuffer} immediately and
* then return.
*
* <!-- src_embed com.azure.core.http.netty.disabled-buffer-copy -->
* <pre>
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .port&
* .disableBufferCopy&
* .build&
*
* client.send&
* .flatMapMany&
* .map&
* .subscribe&
* </pre>
* <!-- end com.azure.core.http.netty.disabled-buffer-copy -->
*
* @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
* {@link ByteBuffer ByteBuffers}.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
this.disableBufferCopy = disableBufferCopy;
return this;
}
/**
* Sets the connection timeout for a request to be sent.
* <p>
* The connection timeout begins once the request attempts to connect to the remote host and finishes once the
* connection is resolved.
* <p>
* If {@code connectTimeout} is null either {@link Configuration
* 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will
* be used.
* <p>
* By default, the connection timeout is 10 seconds.
*
* @param connectTimeout Connect timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
this.connectTimeout = connectTimeout;
return this;
}
/**
* Sets the writing timeout for a request to be sent.
* <p>
* The writing timeout does not apply to the entire request but to the request being sent over the wire. For example
* a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last
* write tracker will update when each operation completes and the outbound buffer will be periodically checked to
* determine if it is still draining.
* <p>
* If {@code writeTimeout} is null either {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be
* used.
*
* @param writeTimeout Write operation timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
this.writeTimeout = writeTimeout;
return this;
}
/**
* Sets the response timeout duration used when waiting for a server to reply.
* <p>
* The response timeout begins once the request write completes and finishes once the first response read is
* triggered when the server response is received.
* <p>
* If {@code responseTimeout} is null either {@link Configuration
* 60-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied to the response. When applying the timeout the greatest of one millisecond and the value of {@code
* responseTimeout} will be used.
*
* @param responseTimeout Response timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
this.responseTimeout = responseTimeout;
return this;
}
/**
* Sets the read timeout duration used when reading the server response.
* <p>
* The read timeout begins once the first response read is triggered after the server response is received. This
* timeout triggers periodically but won't fire its operation if another read operation has completed between when
* the timeout is triggered and completes.
* <p>
* If {@code readTimeout} is null or {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be
* applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code
* readTimeout} will be used.
*
* @param readTimeout Read timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
return options != null && options.getUsername() != null && options.getType() == ProxyOptions.Type.HTTP;
}
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
switch (azureProxyType) {
case HTTP:
return ProxyProvider.Proxy.HTTP;
case SOCKS4:
return ProxyProvider.Proxy.SOCKS4;
case SOCKS5:
return ProxyProvider.Proxy.SOCKS5;
default:
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
}
}
/*
* Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
*
* If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero
* no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used.
*/
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
if (configuredTimeout == null) {
return defaultTimeout;
}
if (configuredTimeout.isZero() || configuredTimeout.isNegative()) {
return 0;
}
return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} | class NettyAsyncHttpClientBuilder {
private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1);
private static final long DEFAULT_CONNECT_TIMEOUT;
private static final long DEFAULT_WRITE_TIMEOUT;
private static final long DEFAULT_RESPONSE_TIMEOUT;
private static final long DEFAULT_READ_TIMEOUT;
private static final ClientLogger LOGGER = new ClientLogger(NettyAsyncHttpClientBuilder.class);
static {
Configuration configuration = Configuration.getGlobalConfiguration();
DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), LOGGER).toMillis();
DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_RESPONSE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration,
PROPERTY_AZURE_REQUEST_RESPONSE_TIMEOUT, Duration.ofSeconds(60), LOGGER).toMillis();
DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT,
Duration.ofSeconds(60), LOGGER).toMillis();
}
private final HttpClient baseHttpClient;
private ProxyOptions proxyOptions;
private ConnectionProvider connectionProvider;
private boolean enableWiretap;
private int port = 80;
private EventLoopGroup eventLoopGroup;
private Configuration configuration;
private boolean disableBufferCopy;
private Duration connectTimeout;
private Duration writeTimeout;
private Duration responseTimeout;
private Duration readTimeout;
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link
* com.azure.core.http.HttpClient} backed by Reactor Netty.
*/
public NettyAsyncHttpClientBuilder() {
this.baseHttpClient = null;
}
/**
* Creates a new builder instance, where a builder is capable of generating multiple instances of {@link HttpClient}
* based on the provided Reactor Netty HttpClient.
*
* <!-- src_embed com.azure.core.http.netty.from-existing-http-client -->
* <pre>
* &
* reactor.netty.http.client.HttpClient baseHttpClient = reactor.netty.http.client.HttpClient.create&
* .wiretap&
* &
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.from-existing-http-client -->
*
* @param nettyHttpClient base reactor netty HttpClient
*/
public NettyAsyncHttpClientBuilder(HttpClient nettyHttpClient) {
this.baseHttpClient = Objects.requireNonNull(nettyHttpClient, "'nettyHttpClient' cannot be null.");
}
/**
* Creates a new Netty-backed {@link com.azure.core.http.HttpClient} instance on every call, using the configuration
* set in the builder at the time of the build method call. Please be aware that client built from this builder can
* support synchronously and asynchronously call of sending request. Use
* {@link com.azure.core.http.HttpClient
* with contextual information.
*
* @return A new Netty-backed {@link com.azure.core.http.HttpClient} instance.
* @throws IllegalStateException If the builder is configured to use an unknown proxy type.
*/
public com.azure.core.http.HttpClient build() {
HttpClient nettyHttpClient;
boolean addressResolverWasSetByBuilder = false;
if (this.baseHttpClient != null) {
nettyHttpClient = baseHttpClient;
} else if (this.connectionProvider != null) {
nettyHttpClient = HttpClient.create(this.connectionProvider).resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
} else {
nettyHttpClient = HttpClient.create().resolver(DefaultAddressResolverGroup.INSTANCE);
addressResolverWasSetByBuilder = true;
}
HttpResponseDecoderSpec initialSpec = nettyHttpClient.configuration().decoder();
nettyHttpClient = nettyHttpClient
.port(port)
.wiretap(enableWiretap)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, (int) getTimeoutMillis(connectTimeout,
DEFAULT_CONNECT_TIMEOUT))
.httpResponseDecoder(httpResponseDecoderSpec -> initialSpec.validateHeaders(false));
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
ProxyOptions buildProxyOptions = proxyOptions == null
? ProxyOptions.fromConfiguration(buildConfiguration, true)
: proxyOptions;
/*
* Only configure the custom authorization challenge handler and challenge holder when using an authenticated
* HTTP proxy. All other proxying such as SOCKS4, SOCKS5, and anonymous HTTP will use Netty's built-in handlers.
*/
boolean useCustomProxyHandler = shouldUseCustomProxyHandler(buildProxyOptions);
AuthorizationChallengeHandler handler = useCustomProxyHandler
? new AuthorizationChallengeHandler(buildProxyOptions.getUsername(), buildProxyOptions.getPassword())
: null;
AtomicReference<ChallengeHolder> proxyChallengeHolder = useCustomProxyHandler ? new AtomicReference<>() : null;
boolean addProxyHandler = false;
Pattern nonProxyHostsPattern = null;
if (eventLoopGroup != null) {
nettyHttpClient = nettyHttpClient.runOn(eventLoopGroup);
}
if (buildProxyOptions != null) {
if (handler != null) {
addProxyHandler = true;
nonProxyHostsPattern = CoreUtils.isNullOrEmpty(buildProxyOptions.getNonProxyHosts())
? null
: Pattern.compile(buildProxyOptions.getNonProxyHosts(), Pattern.CASE_INSENSITIVE);
} else {
nettyHttpClient = nettyHttpClient.proxy(proxy ->
proxy.type(toReactorNettyProxyType(buildProxyOptions.getType()))
.address(buildProxyOptions.getAddress())
.username(buildProxyOptions.getUsername())
.password(ignored -> buildProxyOptions.getPassword())
.nonProxyHosts(buildProxyOptions.getNonProxyHosts()));
}
AddressResolverGroup<?> resolver = nettyHttpClient.configuration().resolver();
if (resolver == null || addressResolverWasSetByBuilder) {
nettyHttpClient = nettyHttpClient.resolver(NoopAddressResolverGroup.INSTANCE);
}
}
return new NettyAsyncHttpClient(nettyHttpClient, disableBufferCopy,
getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT),
getTimeoutMillis(responseTimeout, DEFAULT_RESPONSE_TIMEOUT), addProxyHandler, buildProxyOptions,
nonProxyHostsPattern, handler, proxyChallengeHolder);
}
/**
* Sets the connection provider.
*
* <p><strong>Code Sample</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
* <pre>
* &
* &
* &
* &
* ConnectionProvider connectionProvider = ConnectionProvider.builder&
* .maxConnections&
* .pendingAcquireMaxCount&
* .maxIdleTime&
* .build&
*
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .connectionProvider&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder.connectionProvider
*
* @param connectionProvider the connection provider
* @return the updated {@link NettyAsyncHttpClientBuilder} object.
*/
NettyAsyncHttpClientBuilder connectionProviderInternal(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
return this;
}
/**
* Sets the {@link ProxyOptions proxy options} that the client will use.
*
* @param proxyOptions The proxy configuration to use.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Enables the Netty wiretap feature.
*
* @param enableWiretap Flag indicating wiretap status
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder wiretap(boolean enableWiretap) {
this.enableWiretap = enableWiretap;
return this;
}
/**
* Sets the port which this client should connect, which by default will be set to port 80.
*
* @param port The port to connect to.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder port(int port) {
this.port = port;
return this;
}
/**
* Sets the NIO event loop group that will be used to run IO loops.
*
* @param nioEventLoopGroup The {@link NioEventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
* @deprecated deprecated in favor of {@link
*/
@Deprecated
public NettyAsyncHttpClientBuilder nioEventLoopGroup(NioEventLoopGroup nioEventLoopGroup) {
this.eventLoopGroup = nioEventLoopGroup;
return this;
}
/**
* Sets the IO event loop group that will be used to run IO loops.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.core.http.netty.NettyAsyncHttpClientBuilder
* <pre>
* int threadCount = 5;
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .eventLoopGroup&
* .build&
* </pre>
* <!-- end com.azure.core.http.netty.NettyAsyncHttpClientBuilder
*
* @param eventLoopGroup The {@link EventLoopGroup} that will run IO loops.
* @return the updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder eventLoopGroup(EventLoopGroup eventLoopGroup) {
this.eventLoopGroup = eventLoopGroup;
return this;
}
/**
* Sets the configuration store that is used during construction of the HTTP client.
* <p>
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated NettyAsyncHttpClientBuilder object.
*/
public NettyAsyncHttpClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Disables deep copy of response {@link ByteBuffer} into a heap location that is managed by this client as opposed
* to the underlying netty library which may use direct buffer pool.
* <br>
* <b>
* Caution: Disabling this is not recommended as it can lead to data corruption if the downstream consumers of the
* response do not handle the byte buffers before netty releases them.
* </b>
* If copy is disabled, underlying Netty layer can potentially reclaim byte array backed by the {@code ByteBuffer}
* upon the return of {@code onNext()}. So, users should ensure they process the {@link ByteBuffer} immediately and
* then return.
*
* <!-- src_embed com.azure.core.http.netty.disabled-buffer-copy -->
* <pre>
* HttpClient client = new NettyAsyncHttpClientBuilder&
* .port&
* .disableBufferCopy&
* .build&
*
* client.send&
* .flatMapMany&
* .map&
* .subscribe&
* </pre>
* <!-- end com.azure.core.http.netty.disabled-buffer-copy -->
*
* @param disableBufferCopy If set to {@code true}, the client built from this builder will not deep-copy response
* {@link ByteBuffer ByteBuffers}.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder disableBufferCopy(boolean disableBufferCopy) {
this.disableBufferCopy = disableBufferCopy;
return this;
}
/**
* Sets the connection timeout for a request to be sent.
* <p>
* The connection timeout begins once the request attempts to connect to the remote host and finishes once the
* connection is resolved.
* <p>
* If {@code connectTimeout} is null either {@link Configuration
* 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will
* be used.
* <p>
* By default, the connection timeout is 10 seconds.
*
* @param connectTimeout Connect timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder connectTimeout(Duration connectTimeout) {
this.connectTimeout = connectTimeout;
return this;
}
/**
* Sets the writing timeout for a request to be sent.
* <p>
* The writing timeout does not apply to the entire request but to the request being sent over the wire. For example
* a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last
* write tracker will update when each operation completes and the outbound buffer will be periodically checked to
* determine if it is still draining.
* <p>
* If {@code writeTimeout} is null either {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be
* applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be
* used.
*
* @param writeTimeout Write operation timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) {
this.writeTimeout = writeTimeout;
return this;
}
/**
* Sets the response timeout duration used when waiting for a server to reply.
* <p>
* The response timeout begins once the request write completes and finishes once the first response read is
* triggered when the server response is received.
* <p>
* If {@code responseTimeout} is null either {@link Configuration
* 60-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be
* applied to the response. When applying the timeout the greatest of one millisecond and the value of {@code
* responseTimeout} will be used.
*
* @param responseTimeout Response timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder responseTimeout(Duration responseTimeout) {
this.responseTimeout = responseTimeout;
return this;
}
/**
* Sets the read timeout duration used when reading the server response.
* <p>
* The read timeout begins once the first response read is triggered after the server response is received. This
* timeout triggers periodically but won't fire its operation if another read operation has completed between when
* the timeout is triggered and completes.
* <p>
* If {@code readTimeout} is null or {@link Configuration
* timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be
* applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code
* readTimeout} will be used.
*
* @param readTimeout Read timeout duration.
* @return The updated {@link NettyAsyncHttpClientBuilder} object.
*/
public NettyAsyncHttpClientBuilder readTimeout(Duration readTimeout) {
this.readTimeout = readTimeout;
return this;
}
private static boolean shouldUseCustomProxyHandler(ProxyOptions options) {
return options != null && options.getUsername() != null && options.getType() == ProxyOptions.Type.HTTP;
}
private static ProxyProvider.Proxy toReactorNettyProxyType(ProxyOptions.Type azureProxyType) {
switch (azureProxyType) {
case HTTP:
return ProxyProvider.Proxy.HTTP;
case SOCKS4:
return ProxyProvider.Proxy.SOCKS4;
case SOCKS5:
return ProxyProvider.Proxy.SOCKS5;
default:
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("Unknown 'ProxyOptions.Type' enum value"));
}
}
/*
* Returns the timeout in milliseconds to use based on the passed Duration and default timeout.
*
* If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero
* no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used.
*/
static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) {
if (configuredTimeout == null) {
return defaultTimeout;
}
if (configuredTimeout.isZero() || configuredTimeout.isNegative()) {
return 0;
}
return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT);
}
} |
```suggestion this.bufferSize = (int) Math.min(options.getSize(), DEFAULT_BUFFER_SIZE); ``` As discussed offline, I don't see any reason to use `2 * size` rather than just `size`. | public DownloadBlobNonSharedClientTest(BlobPerfStressOptions options) {
super(options);
this.bufferSize = (int) Math.min(2 * options.getSize(), DEFAULT_BUFFER_SIZE);
this.buffer = new byte[bufferSize];
} | this.bufferSize = (int) Math.min(2 * options.getSize(), DEFAULT_BUFFER_SIZE); | public DownloadBlobNonSharedClientTest(BlobPerfStressOptions options) {
super(options);
this.bufferSize = StoragePerfUtils.getDynamicDownloadBufferSize(options.getSize());
this.buffer = new byte[bufferSize];
} | class DownloadBlobNonSharedClientTest extends AbstractDownloadTest<BlobPerfStressOptions> {
private static final int DEFAULT_BUFFER_SIZE = 16 * 1024 * 1024;
String blobName = "downloadTest";
private final OutputStream devNull = new NullOutputStream();
private final int bufferSize;
private final byte[] buffer;
@Override
public void run() {
BlobClient blobClient = new BlobClientBuilder()
.containerName(CONTAINER_NAME)
.connectionString(connectionString)
.blobName(blobName)
.buildClient();
blobClient.download(devNull);
}
@Override
public Mono<Void> runAsync() {
BlobAsyncClient blobAsyncClient = new BlobClientBuilder()
.containerName(CONTAINER_NAME)
.connectionString(connectionString)
.blobName(blobName)
.buildAsyncClient();
return blobAsyncClient.download()
.map(b -> {
int readCount = 0;
int remaining = b.remaining();
while (readCount < remaining) {
int expectedReadCount = Math.min(remaining - readCount, bufferSize);
b.get(buffer, 0, expectedReadCount);
readCount += expectedReadCount;
}
return 1;
}).then();
}
} | class DownloadBlobNonSharedClientTest extends AbstractDownloadTest<BlobPerfStressOptions> {
String blobName = "downloadTest";
private final OutputStream devNull = new NullOutputStream();
private final int bufferSize;
private final byte[] buffer;
@Override
public void run() {
BlobClient blobClient = new BlobClientBuilder()
.containerName(CONTAINER_NAME)
.connectionString(connectionString)
.blobName(blobName)
.buildClient();
blobClient.download(devNull);
}
@Override
public Mono<Void> runAsync() {
BlobAsyncClient blobAsyncClient = new BlobClientBuilder()
.containerName(CONTAINER_NAME)
.connectionString(connectionString)
.blobName(blobName)
.buildAsyncClient();
return blobAsyncClient.download()
.map(b -> {
int readCount = 0;
int remaining = b.remaining();
while (readCount < remaining) {
int expectedReadCount = Math.min(remaining - readCount, bufferSize);
b.get(buffer, 0, expectedReadCount);
readCount += expectedReadCount;
}
return 1;
}).then();
}
} |
Current convenience layer `BlobProperties` doesn't support lastAccessTimeTrackingPolicy. Use serviceClient instead. | private void enableLastAccessTimeTrackingPolicy(String saName) {
storageManager
.serviceClient()
.getBlobServices()
.setServiceProperties(rgName, saName, new BlobServicePropertiesInner()
.withIsVersioningEnabled(false)
.withLastAccessTimeTrackingPolicy(
new LastAccessTimeTrackingPolicy()
.withEnable(true)
.withName(Name.ACCESS_TIME_TRACKING)
.withTrackingGranularityInDays(1)
.withBlobType(Collections.singletonList("blockBlob"))));
Assertions.assertTrue(storageManager.blobServices().getServicePropertiesAsync(rgName, saName).block().innerModel().lastAccessTimeTrackingPolicy().enable());
} | .withLastAccessTimeTrackingPolicy( | private void enableLastAccessTimeTrackingPolicy(String saName) {
storageManager
.serviceClient()
.getBlobServices()
.setServiceProperties(rgName, saName, new BlobServicePropertiesInner()
.withIsVersioningEnabled(false)
.withLastAccessTimeTrackingPolicy(
new LastAccessTimeTrackingPolicy()
.withEnable(true)
.withName(Name.ACCESS_TIME_TRACKING)
.withTrackingGranularityInDays(1)
.withBlobType(Collections.singletonList("blockBlob"))));
Assertions.assertTrue(storageManager.blobServices().getServicePropertiesAsync(rgName, saName).block().innerModel().lastAccessTimeTrackingPolicy().enable());
} | class StorageManagementPoliciesTests extends StorageManagementTest {
private String rgName = "";
@Override
protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
rgName = generateRandomResourceName("javacsmrg", 15);
super.initializeClients(httpPipeline, profile);
}
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
@Test
public void canCreateManagementPolicies() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("container1/foo")
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.attach()
.create();
List<String> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add("blockBlob");
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
Assertions.assertEquals("rule1", managementPolicy.policy().rules().get(0).name());
Assertions
.assertEquals(
blobTypesToFilterFor, managementPolicy.policy().rules().get(0).definition().filters().blobTypes());
Assertions
.assertEquals(
prefixesToFilterFor, managementPolicy.policy().rules().get(0).definition().filters().prefixMatch());
Assertions
.assertEquals(
30,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.tierToCool()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
90,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.tierToArchive()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
2555,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.delete()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
90,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.snapshot()
.delete()
.daysAfterCreationGreaterThan(),
0.001);
}
@Test
public void managementPolicyGetters() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("container1/foo")
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.attach()
.create();
List<BlobTypes> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add(BlobTypes.BLOCK_BLOB);
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
List<PolicyRule> rules = managementPolicy.rules();
Assertions.assertEquals("rule1", rules.get(0).name());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(blobTypesToFilterFor).toArray(),
rules.get(0).blobTypesToFilterFor().toArray());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(prefixesToFilterFor).toArray(),
rules.get(0).prefixesToFilterFor().toArray());
Assertions.assertEquals(30, rules.get(0).daysAfterBaseBlobModificationUntilCooling().intValue());
Assertions.assertTrue(rules.get(0).tierToCoolActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterBaseBlobModificationUntilArchiving().intValue());
Assertions.assertTrue(rules.get(0).tierToArchiveActionOnBaseBlobEnabled());
Assertions.assertEquals(2555, rules.get(0).daysAfterBaseBlobModificationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterSnapShotCreationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnSnapShotEnabled());
}
@Test
public void canUpdateManagementPolicy() {
String saName = generateRandomResourceName("javacmsa", 15);
List<BlobTypes> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add(BlobTypes.BLOCK_BLOB);
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("asdf")
.withDeleteActionOnSnapShot(100)
.attach()
.defineRule("rule2")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withDeleteActionOnBaseBlob(30)
.attach()
.create();
managementPolicy
.update()
.updateRule("rule1")
.withPrefixesToFilterFor(prefixesToFilterFor)
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.parent()
.withoutRule("rule2")
.apply();
List<PolicyRule> rules = managementPolicy.rules();
Assertions.assertEquals(1, rules.size());
Assertions.assertEquals("rule1", rules.get(0).name());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(blobTypesToFilterFor).toArray(),
rules.get(0).blobTypesToFilterFor().toArray());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(prefixesToFilterFor).toArray(),
rules.get(0).prefixesToFilterFor().toArray());
Assertions.assertEquals(30, rules.get(0).daysAfterBaseBlobModificationUntilCooling().intValue());
Assertions.assertTrue(rules.get(0).tierToCoolActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterBaseBlobModificationUntilArchiving().intValue());
Assertions.assertTrue(rules.get(0).tierToArchiveActionOnBaseBlobEnabled());
Assertions.assertEquals(2555, rules.get(0).daysAfterBaseBlobModificationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterSnapShotCreationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnSnapShotEnabled());
}
@Test
public void testLcmBaseBlobActionsWithPremiumAccount() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withSku(StorageAccountSkuType.PREMIUM_LRS)
.withBlockBlobStorageAccountKind()
.create();
Assertions.assertEquals(StorageAccountSkuType.PREMIUM_LRS.name(), storageAccount.skuType().name());
Assertions.assertEquals(Kind.BLOCK_BLOB_STORAGE, storageAccount.kind());
enableLastAccessTimeTrackingPolicy(saName);
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy = managementPolicies.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("tierToHotLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToCoolLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToHotCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToCoolCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToHotLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToCoolLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToCoolAutoUpTierLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)).withEnableAutoTierToHotFromCool(true))
.attach()
.create();
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> ResourceManagerUtils.toPrimitiveBoolean(rule.actionsOnBaseBlob().enableAutoTierToHotFromCool())));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterLastAccessTimeGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterLastAccessTimeGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterLastAccessTimeGreaterThan() != null));
}
} | class StorageManagementPoliciesTests extends StorageManagementTest {
private String rgName = "";
@Override
protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
rgName = generateRandomResourceName("javacsmrg", 15);
super.initializeClients(httpPipeline, profile);
}
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().deleteByName(rgName);
}
@Test
public void canCreateManagementPolicies() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("container1/foo")
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.attach()
.create();
List<String> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add("blockBlob");
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
Assertions.assertEquals("rule1", managementPolicy.policy().rules().get(0).name());
Assertions
.assertEquals(
blobTypesToFilterFor, managementPolicy.policy().rules().get(0).definition().filters().blobTypes());
Assertions
.assertEquals(
prefixesToFilterFor, managementPolicy.policy().rules().get(0).definition().filters().prefixMatch());
Assertions
.assertEquals(
30,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.tierToCool()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
90,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.tierToArchive()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
2555,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.baseBlob()
.delete()
.daysAfterModificationGreaterThan(),
0.001);
Assertions
.assertEquals(
90,
managementPolicy
.policy()
.rules()
.get(0)
.definition()
.actions()
.snapshot()
.delete()
.daysAfterCreationGreaterThan(),
0.001);
}
@Test
public void managementPolicyGetters() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("container1/foo")
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.attach()
.create();
List<BlobTypes> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add(BlobTypes.BLOCK_BLOB);
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
List<PolicyRule> rules = managementPolicy.rules();
Assertions.assertEquals("rule1", rules.get(0).name());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(blobTypesToFilterFor).toArray(),
rules.get(0).blobTypesToFilterFor().toArray());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(prefixesToFilterFor).toArray(),
rules.get(0).prefixesToFilterFor().toArray());
Assertions.assertEquals(30, rules.get(0).daysAfterBaseBlobModificationUntilCooling().intValue());
Assertions.assertTrue(rules.get(0).tierToCoolActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterBaseBlobModificationUntilArchiving().intValue());
Assertions.assertTrue(rules.get(0).tierToArchiveActionOnBaseBlobEnabled());
Assertions.assertEquals(2555, rules.get(0).daysAfterBaseBlobModificationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterSnapShotCreationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnSnapShotEnabled());
}
@Test
public void canUpdateManagementPolicy() {
String saName = generateRandomResourceName("javacmsa", 15);
List<BlobTypes> blobTypesToFilterFor = new ArrayList<>();
blobTypesToFilterFor.add(BlobTypes.BLOCK_BLOB);
List<String> prefixesToFilterFor = new ArrayList<>();
prefixesToFilterFor.add("container1/foo");
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withBlobStorageAccountKind()
.withAccessTier(AccessTier.COOL)
.create();
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy =
managementPolicies
.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("rule1")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withPrefixToFilterFor("asdf")
.withDeleteActionOnSnapShot(100)
.attach()
.defineRule("rule2")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withDeleteActionOnBaseBlob(30)
.attach()
.create();
managementPolicy
.update()
.updateRule("rule1")
.withPrefixesToFilterFor(prefixesToFilterFor)
.withTierToCoolActionOnBaseBlob(30)
.withTierToArchiveActionOnBaseBlob(90)
.withDeleteActionOnBaseBlob(2555)
.withDeleteActionOnSnapShot(90)
.parent()
.withoutRule("rule2")
.apply();
List<PolicyRule> rules = managementPolicy.rules();
Assertions.assertEquals(1, rules.size());
Assertions.assertEquals("rule1", rules.get(0).name());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(blobTypesToFilterFor).toArray(),
rules.get(0).blobTypesToFilterFor().toArray());
Assertions
.assertArrayEquals(
Collections.unmodifiableList(prefixesToFilterFor).toArray(),
rules.get(0).prefixesToFilterFor().toArray());
Assertions.assertEquals(30, rules.get(0).daysAfterBaseBlobModificationUntilCooling().intValue());
Assertions.assertTrue(rules.get(0).tierToCoolActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterBaseBlobModificationUntilArchiving().intValue());
Assertions.assertTrue(rules.get(0).tierToArchiveActionOnBaseBlobEnabled());
Assertions.assertEquals(2555, rules.get(0).daysAfterBaseBlobModificationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnBaseBlobEnabled());
Assertions.assertEquals(90, rules.get(0).daysAfterSnapShotCreationUntilDeleting().intValue());
Assertions.assertTrue(rules.get(0).deleteActionOnSnapShotEnabled());
}
@Test
public void testLcmBaseBlobActionsWithPremiumAccount() {
String saName = generateRandomResourceName("javacmsa", 15);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(saName)
.withRegion(Region.US_WEST_CENTRAL)
.withNewResourceGroup(rgName)
.withSku(StorageAccountSkuType.PREMIUM_LRS)
.withBlockBlobStorageAccountKind()
.create();
Assertions.assertEquals(StorageAccountSkuType.PREMIUM_LRS.name(), storageAccount.skuType().name());
Assertions.assertEquals(Kind.BLOCK_BLOB_STORAGE, storageAccount.kind());
enableLastAccessTimeTrackingPolicy(saName);
ManagementPolicies managementPolicies = this.storageManager.managementPolicies();
ManagementPolicy managementPolicy = managementPolicies.define("management-test")
.withExistingStorageAccount(rgName, saName)
.defineRule("tierToHotLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToCoolLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveLMT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterModificationGreaterThan(50f)))
.attach()
.defineRule("tierToHotCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToCoolCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveCreated")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterCreationGreaterThan(50f)))
.attach()
.defineRule("tierToHotLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToHot(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToCoolLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToArchiveLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToArchive(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)))
.attach()
.defineRule("tierToCoolAutoUpTierLAT")
.withLifecycleRuleType()
.withBlobTypeToFilterFor(BlobTypes.BLOCK_BLOB)
.withActionsOnBaseBlob(new ManagementPolicyBaseBlob().withTierToCool(new DateAfterModification().withDaysAfterLastAccessTimeGreaterThan(50f)).withEnableAutoTierToHotFromCool(true))
.attach()
.create();
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> ResourceManagerUtils.toPrimitiveBoolean(rule.actionsOnBaseBlob().enableAutoTierToHotFromCool())));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterModificationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterCreationGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToHot() != null && rule.actionsOnBaseBlob().tierToHot().daysAfterLastAccessTimeGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToCool() != null && rule.actionsOnBaseBlob().tierToCool().daysAfterLastAccessTimeGreaterThan() != null));
Assertions.assertTrue(managementPolicy.rules().stream().anyMatch(rule -> rule.actionsOnBaseBlob().tierToArchive() != null && rule.actionsOnBaseBlob().tierToArchive().daysAfterLastAccessTimeGreaterThan() != null));
}
} |
It would be good to cache the length after the first time retrieving it. | public Long getLength() {
return content.stream().mapToLong(Buffer::remaining).sum();
} | return content.stream().mapToLong(Buffer::remaining).sum(); | public Long getLength() {
if (cachedLength == null) {
cachedLength = content.stream().mapToLong(Buffer::remaining).sum();
}
return cachedLength;
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
/**
* Creates a new instance of {@link BinaryDataContent}.
*
* @param content The {@link ByteBuffer} content.
* @throws NullPointerException If {@code content} is null.
*/
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
@Override
@Override
public String toString() {
return new String(toBytes(), StandardCharsets.UTF_8);
}
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
return serializer.deserializeFromBytes(toBytes(), typeReference);
}
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
@Override
public ByteBuffer toByteBuffer() {
return ByteBuffer.wrap(toBytes()).asReadOnlyBuffer();
}
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
@Override
public boolean isReplayable() {
return true;
}
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
private byte[] getBytes() {
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int)length];
int offset = 0;
for (ByteBuffer bb : content) {
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
}
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
private Long cachedLength;
/**
* Creates a new instance of {@link BinaryDataContent}.
*
* @param content The {@link ByteBuffer} content.
* @throws NullPointerException If {@code content} is null.
*/
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
@Override
@Override
public String toString() {
return new String(toBytes(), StandardCharsets.UTF_8);
}
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
return serializer.deserializeFromBytes(toBytes(), typeReference);
}
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
@Override
public ByteBuffer toByteBuffer() {
return ByteBuffer.wrap(toBytes()).asReadOnlyBuffer();
}
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
@Override
public boolean isReplayable() {
return true;
}
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
private byte[] getBytes() {
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int) length];
int offset = 0;
for (ByteBuffer bb : content) {
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
}
} |
I guess we can skip this call to `getLength()` and keep a running sum of the length in the for loop below and if the sum exceeds the max array size, throw an exception. We don't have to go through the list of all byte buffers twice. | private byte[] getBytes() {
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int) length];
int offset = 0;
for (ByteBuffer bb : content) {
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
} | long length = getLength(); | private byte[] getBytes() {
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int) length];
int offset = 0;
for (ByteBuffer bb : content) {
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
private Long cachedLength;
/**
* Creates a new instance of {@link BinaryDataContent}.
*
* @param content The {@link ByteBuffer} content.
* @throws NullPointerException If {@code content} is null.
*/
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
@Override
public Long getLength() {
if (cachedLength == null) {
cachedLength = content.stream().mapToLong(Buffer::remaining).sum();
}
return cachedLength;
}
@Override
public String toString() {
return new String(toBytes(), StandardCharsets.UTF_8);
}
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
return serializer.deserializeFromBytes(toBytes(), typeReference);
}
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
@Override
public ByteBuffer toByteBuffer() {
return ByteBuffer.wrap(toBytes()).asReadOnlyBuffer();
}
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
@Override
public boolean isReplayable() {
return true;
}
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
private Long cachedLength;
/**
 * Creates a new instance of {@link BinaryDataContent} backed by a list of {@link ByteBuffer}s.
 * <p>
 * The list is stored by reference (no defensive copy), so the caller must not mutate the list
 * or the buffers' positions afterwards — NOTE(review): confirm callers treat the list as frozen.
 *
 * @param content The list of {@link ByteBuffer}s providing the content, read in list order.
 * @throws NullPointerException If {@code content} is null.
 */
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
/**
 * Returns the total content length: the sum of {@link Buffer#remaining()} over all buffers.
 * <p>
 * Computed lazily and memoized in {@code cachedLength}. The cache field is not volatile, so
 * concurrent first calls may each compute the sum; that race is benign provided the buffers'
 * positions are not mutated concurrently — NOTE(review): confirm buffers are effectively
 * read-only here.
 *
 * @return the total number of readable bytes across the backing buffers.
 */
@Override
public Long getLength() {
// Lazy init: compute once, then reuse the memoized value.
if (cachedLength == null) {
cachedLength = content.stream().mapToLong(Buffer::remaining).sum();
}
return cachedLength;
}
/**
 * Decodes the aggregated content as UTF-8 text.
 *
 * @return the full content rendered as a UTF-8 {@link String}.
 */
@Override
public String toString() {
    final byte[] allBytes = toBytes();
    return new String(allBytes, StandardCharsets.UTF_8);
}
/**
 * Materializes the content as a single byte array, caching the result in the {@code bytes} field.
 * <p>
 * Publication goes through {@code BYTES_UPDATER} (an {@link AtomicReferenceFieldUpdater}), so the
 * cached array is safely visible across threads; under contention the {@code updateAndGet} retry
 * loop may invoke {@code getBytes()} more than once, but every caller observes one fully-built
 * array. The returned array is the shared cache, not a copy.
 *
 * @return the cached byte array holding the full content.
 */
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
/**
 * Deserializes the aggregated content into an object of the requested type.
 *
 * @param typeReference the target type to deserialize into.
 * @param serializer the serializer that performs the deserialization.
 * @return the deserialized object.
 */
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
    final byte[] payload = toBytes();
    return serializer.deserializeFromBytes(payload, typeReference);
}
/**
 * Exposes the content as an {@link InputStream} over the backing buffer list.
 * NOTE(review): assumes {@code IterableOfByteBuffersInputStream} reads the buffers in list order
 * without disturbing their positions — confirm in that class.
 *
 * @return a stream over the backing buffers.
 */
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
/**
 * Returns the whole content as a single read-only {@link ByteBuffer}.
 *
 * @return a read-only buffer wrapping the aggregated (cached) bytes.
 */
@Override
public ByteBuffer toByteBuffer() {
    final ByteBuffer aggregated = ByteBuffer.wrap(toBytes());
    return aggregated.asReadOnlyBuffer();
}
/**
 * Emits the backing buffers as a {@link Flux}, one read-only view per buffer, in list order.
 * Each {@link ByteBuffer#asReadOnlyBuffer()} view has its own position/limit, so consuming an
 * emitted buffer does not advance the original; re-subscription produces fresh views.
 *
 * @return a finite flux of read-only buffer views over the content.
 */
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
// The content is held fully in memory and the accessors above read via duplicates/read-only
// views, so this content can be served repeatedly without being consumed.
@Override
public boolean isReplayable() {
return true;
}
// Already replayable (see isReplayable), so this instance is returned as-is.
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
// Async variant: no conversion work needed, wrap this instance in an already-resolved Mono.
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
} |
Since we need the length to allocate the byte array and not resize the array and copy, let's leave this as is. | private byte[] getBytes() {
// A Java byte[] cannot exceed MAX_ARRAY_SIZE; fail fast before the (int) cast below would
// silently truncate the length.
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int) length];
int offset = 0;
for (ByteBuffer bb : content) {
// Duplicate so that reading does not advance the position of the shared backing buffer.
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
} | long length = getLength(); | private byte[] getBytes() {
long length = getLength();
if (length > MAX_ARRAY_SIZE) {
throw LOGGER.logExceptionAsError(new IllegalStateException(TOO_LARGE_FOR_BYTE_ARRAY + length));
}
byte[] bytes = new byte[(int) length];
int offset = 0;
for (ByteBuffer bb : content) {
bb = bb.duplicate();
int count = bb.remaining();
bb.get(bytes, offset, count);
offset += count;
}
return bytes;
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
private Long cachedLength;
/**
* Creates a new instance of {@link BinaryDataContent}.
*
* @param content The {@link ByteBuffer} content.
* @throws NullPointerException If {@code content} is null.
*/
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
@Override
public Long getLength() {
if (cachedLength == null) {
cachedLength = content.stream().mapToLong(Buffer::remaining).sum();
}
return cachedLength;
}
@Override
public String toString() {
return new String(toBytes(), StandardCharsets.UTF_8);
}
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
return serializer.deserializeFromBytes(toBytes(), typeReference);
}
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
@Override
public ByteBuffer toByteBuffer() {
return ByteBuffer.wrap(toBytes()).asReadOnlyBuffer();
}
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
@Override
public boolean isReplayable() {
return true;
}
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
} | class ListByteBufferContent extends BinaryDataContent {
private static final ClientLogger LOGGER = new ClientLogger(ListByteBufferContent.class);
private final List<ByteBuffer> content;
private volatile byte[] bytes;
private static final AtomicReferenceFieldUpdater<ListByteBufferContent, byte[]> BYTES_UPDATER
= AtomicReferenceFieldUpdater.newUpdater(ListByteBufferContent.class, byte[].class, "bytes");
private Long cachedLength;
/**
* Creates a new instance of {@link BinaryDataContent}.
*
* @param content The {@link ByteBuffer} content.
* @throws NullPointerException If {@code content} is null.
*/
public ListByteBufferContent(List<ByteBuffer> content) {
this.content = Objects.requireNonNull(content, "'content' cannot be null.");
}
@Override
public Long getLength() {
if (cachedLength == null) {
cachedLength = content.stream().mapToLong(Buffer::remaining).sum();
}
return cachedLength;
}
@Override
public String toString() {
return new String(toBytes(), StandardCharsets.UTF_8);
}
@Override
public byte[] toBytes() {
return BYTES_UPDATER.updateAndGet(this, bytes -> bytes == null ? getBytes() : bytes);
}
@Override
public <T> T toObject(TypeReference<T> typeReference, ObjectSerializer serializer) {
return serializer.deserializeFromBytes(toBytes(), typeReference);
}
@Override
public InputStream toStream() {
return new IterableOfByteBuffersInputStream(content);
}
@Override
public ByteBuffer toByteBuffer() {
return ByteBuffer.wrap(toBytes()).asReadOnlyBuffer();
}
@Override
public Flux<ByteBuffer> toFluxByteBuffer() {
return Flux.fromIterable(content).map(ByteBuffer::asReadOnlyBuffer);
}
@Override
public boolean isReplayable() {
return true;
}
@Override
public BinaryDataContent toReplayableContent() {
return this;
}
@Override
public Mono<BinaryDataContent> toReplayableContentAsync() {
return Mono.just(this);
}
} |
maybe instead of `Unhealthy`, `RevalidationNeeded` is a better one to use for status | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | addressUri.setUnhealthy(); | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
/**
 * Creates a listener bound to a single {@link RntbdEndpoint}.
 * The address set is a concurrent set because {@code onBeforeSendRequest} may add entries while
 * a connection-failure path is iterating them.
 *
 * @param endpoint the endpoint whose connection state is observed; must not be null.
 */
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
/**
 * Records the replica address a request is about to use, so a later connection failure on this
 * endpoint can mark that address unhealthy (see {@code onConnectionEvent}).
 * NOTE(review): entries are never removed here; presumably bounded by the endpoint's lifetime —
 * confirm against the endpoint lifecycle.
 *
 * @param addressUri the replica address about to be used; must not be null.
 */
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
/**
 * Routes a channel failure to the connection-state handling logic.
 * <p>
 * Only {@link IOException}s count as connection-level failures: a {@link ClosedChannelException}
 * maps to {@code READ_EOF}, any other {@link IOException} to {@code READ_FAILURE}. All other
 * throwables are logged at debug level and otherwise ignored because they do not indicate a
 * broken connection.
 *
 * @param exception the failure observed on the channel; must not be null.
 */
public void onException(Throwable exception) {
    // Fix: message aligned with the "expected non-null ..." wording used elsewhere in this class
    // (see the constructor and onConnectionEvent); previously read "expect non-null exception".
    checkNotNull(exception, "expected non-null exception");
    this.metrics.record();
    if (exception instanceof IOException) {
        if (exception instanceof ClosedChannelException) {
            this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
        } else {
            this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
        }
    } else {
        // Non-I/O errors do not imply a broken connection; record nothing beyond the metric above.
        if (logger.isDebugEnabled()) {
            logger.debug("Will not raise the connection state change event for error", exception);
        }
    }
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} |
I initially was concerned that some netty timeouts might also come as IOExceptions - but that isn't true (they all implement io.netty.channel.ChannelException - so, I agree - this should be good. | public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
} | this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception)); | public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
}
/**
 * Handles a connection-level event by marking every address previously routed through this
 * endpoint as unhealthy.
 *
 * @param event the connection event being raised; only READ_EOF and READ_FAILURE are handled.
 * @param exception the failure that triggered the event (used for logging only).
 * @return the number of tracked addresses flagged, or 0 when the event type is not handled.
 */
private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
    checkNotNull(exception, "expected non-null exception");
    if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
        if (logger.isDebugEnabled()) {
            // Fix: the JSON object opened after '(' was never closed; added the trailing '}'
            // so the logged payload is well-formed.
            logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{}})",
                event,
                RntbdObjectMapper.toJson(Instant.now()),
                RntbdObjectMapper.toJson(this.endpoint),
                RntbdObjectMapper.toJson(exception));
        }
        for (Uri addressUri : this.addressUris) {
            addressUri.setUnhealthy();
        }
        return addressUris.size();
    }
    return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
}
private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} |
+1 | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | addressUri.setUnhealthy(); | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
/**
 * Inspects a channel failure and, for I/O failures, raises the matching connection event.
 * Non-I/O errors are logged at debug level and otherwise ignored.
 *
 * @param exception the failure observed on the channel; must not be null.
 */
public void onException(Throwable exception) {
    checkNotNull(exception, "expect non-null exception");
    this.metrics.record();
    if (!(exception instanceof IOException)) {
        // Not a connection-level failure; nothing to raise.
        if (logger.isDebugEnabled()) {
            logger.debug("Will not raise the connection state change event for error", exception);
        }
        return;
    }
    final RntbdConnectionEvent event = exception instanceof ClosedChannelException
        ? RntbdConnectionEvent.READ_EOF
        : RntbdConnectionEvent.READ_FAILURE;
    this.metrics.recordAddressUpdated(this.onConnectionEvent(event, exception));
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} |
yes - they should be different types of exceptions | public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
} | this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception)); | public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
}
private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
}
/**
 * Reacts to a read-EOF / read-failure event by flagging every tracked address as unhealthy.
 * Other event types are ignored.
 *
 * @param event the connection event being raised.
 * @param exception the failure that triggered the event (logged only).
 * @return the number of tracked addresses that were flagged, or 0 for unhandled events.
 */
private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
    checkNotNull(exception, "expected non-null exception");
    // Guard clause: only read-EOF and read-failure events update address health.
    if (event != RntbdConnectionEvent.READ_EOF && event != RntbdConnectionEvent.READ_FAILURE) {
        return 0;
    }
    if (logger.isDebugEnabled()) {
        logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
            event,
            RntbdObjectMapper.toJson(Instant.now()),
            RntbdObjectMapper.toJson(this.endpoint),
            RntbdObjectMapper.toJson(exception));
    }
    this.addressUris.forEach(Uri::setUnhealthy);
    return this.addressUris.size();
}
} |
will update the name in a different PR - as it is internal implementation details, also the naming discussion may take some time | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | addressUri.setUnhealthy(); | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} |
It's not adding new state into the overall state transition right? | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | addressUri.setUnhealthy(); | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} |
not really, I am more thinking just name changing: change from `Unhealthy` -> `RevalidationNeeded`. But keep the differentiation can be useful in the future, will present two ways and discuss with team in a different PR. | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | addressUri.setUnhealthy(); | private int onConnectionEvent(final RntbdConnectionEvent event, final Throwable exception) {
checkNotNull(exception, "expected non-null exception");
if (event == RntbdConnectionEvent.READ_EOF || event == RntbdConnectionEvent.READ_FAILURE) {
if (logger.isDebugEnabled()) {
logger.debug("onConnectionEvent({\"event\":{},\"time\":{},\"endpoint\":{},\"cause\":{})",
event,
RntbdObjectMapper.toJson(Instant.now()),
RntbdObjectMapper.toJson(this.endpoint),
RntbdObjectMapper.toJson(exception));
}
for (Uri addressUri : this.addressUris) {
addressUri.setUnhealthy();
}
return addressUris.size();
}
return 0;
}
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} | class RntbdConnectionStateListener {
private static final Logger logger = LoggerFactory.getLogger(RntbdConnectionStateListener.class);
private final RntbdEndpoint endpoint;
private final RntbdConnectionStateListenerMetrics metrics;
private final Set<Uri> addressUris;
public RntbdConnectionStateListener(final RntbdEndpoint endpoint) {
this.endpoint = checkNotNull(endpoint, "expected non-null endpoint");
this.metrics = new RntbdConnectionStateListenerMetrics();
this.addressUris = ConcurrentHashMap.newKeySet();
}
public void onBeforeSendRequest(Uri addressUri) {
checkNotNull(addressUri, "Argument 'addressUri' should not be null");
this.addressUris.add(addressUri);
}
public void onException(Throwable exception) {
checkNotNull(exception, "expect non-null exception");
this.metrics.record();
if (exception instanceof IOException) {
if (exception instanceof ClosedChannelException) {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_EOF, exception));
} else {
this.metrics.recordAddressUpdated(this.onConnectionEvent(RntbdConnectionEvent.READ_FAILURE, exception));
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Will not raise the connection state change event for error", exception);
}
}
}
public RntbdConnectionStateListenerMetrics getMetrics() {
return this.metrics;
} |
Should we do as in `withLastAccessTimeTrackingPolicyEnabled`? Aka, if policy exist, only change the value of enable? | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false));
} | return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false)); | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() != null) {
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(false);
}
return this;
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} |
Despite we only change the enable to false, the whole `lastAccessTimeTrackingPolicy` will be set to null by backend. But why not, maybe the behavior will change in the future.. | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false));
} | return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false)); | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() != null) {
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(false);
}
return this;
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} |
OK, I see. That might be service behavior (if disabled, the whole object is no longer needed). But I guess if possible we still make client logic consistent (if backend works fine with it). | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false));
} | return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false)); | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() != null) {
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(false);
}
return this;
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} |
Sure. | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false));
} | return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false)); | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() != null) {
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(false);
}
return this;
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} |
Changed. | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false));
} | return withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy().withEnable(false)); | public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyDisabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() != null) {
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(false);
}
return this;
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} | class BlobServicePropertiesImpl
extends CreatableUpdatableImpl<BlobServiceProperties, BlobServicePropertiesInner, BlobServicePropertiesImpl>
implements BlobServiceProperties, BlobServiceProperties.Definition, BlobServiceProperties.Update {
private final StorageManager manager;
private String resourceGroupName;
private String accountName;
BlobServicePropertiesImpl(String name, StorageManager manager) {
super(name, new BlobServicePropertiesInner());
this.manager = manager;
this.accountName = name;
}
BlobServicePropertiesImpl(BlobServicePropertiesInner inner, StorageManager manager) {
super(inner.name(), inner);
this.manager = manager;
this.accountName = inner.name();
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "storageAccounts");
}
@Override
public StorageManager manager() {
return this.manager;
}
@Override
public Mono<BlobServiceProperties> createResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
public Mono<BlobServiceProperties> updateResourceAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client
.setServicePropertiesAsync(this.resourceGroupName, this.accountName, this.innerModel())
.map(innerToFluentMap(this));
}
@Override
protected Mono<BlobServicePropertiesInner> getInnerAsync() {
BlobServicesClient client = this.manager().serviceClient().getBlobServices();
return client.getServicePropertiesAsync(this.resourceGroupName, this.accountName);
}
@Override
public boolean isInCreateMode() {
return this.innerModel().id() == null;
}
@Override
public CorsRules cors() {
return this.innerModel().cors();
}
@Override
public String defaultServiceVersion() {
return this.innerModel().defaultServiceVersion();
}
@Override
public DeleteRetentionPolicy deleteRetentionPolicy() {
return this.innerModel().deleteRetentionPolicy();
}
@Override
public DeleteRetentionPolicy containerDeleteRetentionPolicy() {
return this.innerModel().containerDeleteRetentionPolicy();
}
@Override
public String id() {
return this.innerModel().id();
}
@Override
public String name() {
return this.innerModel().name();
}
@Override
public String type() {
return this.innerModel().type();
}
@Override
public Boolean isBlobVersioningEnabled() {
return this.innerModel().isVersioningEnabled();
}
@Override
public boolean isLastAccessTimeTrackingPolicyEnabled() {
return this.innerModel().lastAccessTimeTrackingPolicy() != null
&& ResourceManagerUtils.toPrimitiveBoolean(this.innerModel().lastAccessTimeTrackingPolicy().enable());
}
@Override
public LastAccessTimeTrackingPolicy lastAccessTimeTrackingPolicy() {
return this.innerModel().lastAccessTimeTrackingPolicy();
}
@Override
public BlobServicePropertiesImpl withExistingStorageAccount(String resourceGroupName, String accountName) {
this.resourceGroupName = resourceGroupName;
this.accountName = accountName;
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRules(List<CorsRule> corsRules) {
this.innerModel().withCors(new CorsRules().withCorsRules(corsRules));
return this;
}
@Override
public BlobServicePropertiesImpl withCORSRule(CorsRule corsRule) {
CorsRules corsRules = this.innerModel().cors();
if (corsRules == null) {
List<CorsRule> firstCorsRule = new ArrayList<>();
firstCorsRule.add(corsRule);
this.innerModel().withCors(new CorsRules().withCorsRules(firstCorsRule));
} else {
List<CorsRule> currentCorsRules = corsRules.corsRules();
currentCorsRules.add(corsRule);
this.innerModel().withCors(corsRules.withCorsRules(currentCorsRules));
}
return this;
}
@Override
public BlobServicePropertiesImpl withDefaultServiceVersion(String defaultServiceVersion) {
this.innerModel().withDefaultServiceVersion(defaultServiceVersion);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withDeleteRetentionPolicyDisabled() {
this.innerModel().withDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningEnabled() {
this.innerModel().withIsVersioningEnabled(true);
return this;
}
@Override
public BlobServicePropertiesImpl withBlobVersioningDisabled() {
this.innerModel().withIsVersioningEnabled(false);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicy(DeleteRetentionPolicy deleteRetentionPolicy) {
this.innerModel().withContainerDeleteRetentionPolicy(deleteRetentionPolicy);
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyEnabled(int numDaysEnabled) {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(true).withDays(numDaysEnabled));
return this;
}
@Override
public BlobServicePropertiesImpl withContainerDeleteRetentionPolicyDisabled() {
this.innerModel().withContainerDeleteRetentionPolicy(new DeleteRetentionPolicy().withEnabled(false));
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicyEnabled() {
if (this.innerModel().lastAccessTimeTrackingPolicy() == null) {
this.innerModel().withLastAccessTimeTrackingPolicy(new LastAccessTimeTrackingPolicy());
}
this.innerModel().lastAccessTimeTrackingPolicy().withEnable(true);
return this;
}
@Override
public BlobServicePropertiesImpl withLastAccessTimeTrackingPolicy(LastAccessTimeTrackingPolicy policy) {
this.innerModel().withLastAccessTimeTrackingPolicy(policy);
return this;
}
@Override
} |
Does the `SyncTokenPolicy` used in the pipeline and the one passed to the ConigurationClient need to be the same? | public ConfigurationClient buildClient() {
return new ConfigurationClient(buildInnerClient(), new SyncTokenPolicy());
} | return new ConfigurationClient(buildInnerClient(), new SyncTokenPolicy()); | public ConfigurationClient buildClient() {
final SyncTokenPolicy syncTokenPolicy = new SyncTokenPolicy();
return new ConfigurationClient(buildInnerClient(syncTokenPolicy), syncTokenPolicy);
} | class ConfigurationClientBuilder implements
TokenCredentialTrait<ConfigurationClientBuilder>,
ConnectionStringTrait<ConfigurationClientBuilder>,
HttpTrait<ConfigurationClientBuilder>,
ConfigurationTrait<ConfigurationClientBuilder>,
EndpointTrait<ConfigurationClientBuilder> {
private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS);
/**
* The serializer to serialize an object into a string.
*/
private static final SerializerAdapter SERIALIZER_ADAPTER;
private static final String CLIENT_NAME;
private static final String CLIENT_VERSION;
private static final HttpPipelinePolicy ADD_HEADERS_POLICY;
static {
Map<String, String> properties = CoreUtils.getProperties("azure-data-appconfiguration.properties");
CLIENT_NAME = properties.getOrDefault("name", "UnknownName");
CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion");
ADD_HEADERS_POLICY = new AddHeadersPolicy(new HttpHeaders()
.set("x-ms-return-client-request-id", "true")
.set("Content-Type", "application/json")
.set("Accept", "application/vnd.microsoft.azconfig.kv+json"));
JacksonAdapter jacksonAdapter = new JacksonAdapter();
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonSerializer.getModule());
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonDeserializer.getModule());
SERIALIZER_ADAPTER = jacksonAdapter;
}
private final ClientLogger logger = new ClientLogger(ConfigurationClientBuilder.class);
private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>();
private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private ConfigurationClientCredentials credential;
private TokenCredential tokenCredential;
private String endpoint;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private HttpPipelinePolicy retryPolicy;
private RetryOptions retryOptions;
private Configuration configuration;
private ConfigurationServiceVersion version;
/**
* Constructs a new builder used to configure and build {@link ConfigurationClient ConfigurationClients} and {@link
* ConfigurationAsyncClient ConfigurationAsyncClients}.
*/
public ConfigurationClientBuilder() {
httpLogOptions = new HttpLogOptions();
}
/**
* Creates a {@link ConfigurationClient} based on options set in the Builder. Every time {@code buildClient()} is
* called a new instance of {@link ConfigurationClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationClient client}. All other builder settings are ignored.</p>
*
* @return A ConfigurationClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
/**
* Creates a {@link ConfigurationAsyncClient} based on options set in the Builder. Every time {@code
* buildAsyncClient()} is called a new instance of {@link ConfigurationAsyncClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationAsyncClient client}. All other builder settings are
* ignored.
*
* @return A ConfigurationAsyncClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
public ConfigurationAsyncClient buildAsyncClient() {
return new ConfigurationAsyncClient(buildInnerClient(), new SyncTokenPolicy());
}
/**
* Builds an instance of ConfigurationClientImpl with the provided parameters.
*
* @return an instance of ConfigurationClientImpl.
*/
private ConfigurationClientImpl buildInnerClient() {
ConfigurationServiceVersion serviceVersion = (version != null)
? version
: ConfigurationServiceVersion.getLatest();
return new ConfigurationClientImpl(
pipeline == null ? createHttpPipeline() : pipeline,
SERIALIZER_ADAPTER,
endpoint,
serviceVersion.getVersion());
}
private HttpPipeline createHttpPipeline() {
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
String buildEndpoint = endpoint;
if (tokenCredential == null) {
buildEndpoint = getBuildEndpoint();
}
Objects.requireNonNull(buildEndpoint, "'Endpoint' is required and can not be null.");
final List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy(
getApplicationId(clientOptions, httpLogOptions), CLIENT_NAME, CLIENT_VERSION, buildConfiguration));
policies.add(new RequestIdPolicy());
policies.add(new AddHeadersFromContextPolicy());
policies.add(ADD_HEADERS_POLICY);
policies.addAll(perCallPolicies);
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions, DEFAULT_RETRY_POLICY));
policies.add(new AddDatePolicy());
if (tokenCredential != null) {
policies.add(
new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", buildEndpoint)));
} else if (credential != null) {
policies.add(new ConfigurationCredentialsPolicy(credential));
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
policies.add(new SyncTokenPolicy());
policies.addAll(perRetryPolicies);
if (clientOptions != null) {
List<HttpHeader> httpHeaderList = new ArrayList<>();
clientOptions.getHeaders().forEach(
header -> httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
}
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
}
/**
* Sets the service endpoint for the Azure App Configuration instance.
*
* @param endpoint The URL of the Azure App Configuration instance.
* @return The updated ConfigurationClientBuilder object.
* @throws IllegalArgumentException If {@code endpoint} is null, or it cannot be parsed into a valid URL.
*/
@Override
public ConfigurationClientBuilder endpoint(String endpoint) {
try {
new URL(endpoint);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL", ex));
}
this.endpoint = endpoint;
return this;
}
/**
* Allows for setting common properties such as application ID, headers, proxy configuration, etc. Note that it is
* recommended that this method be called with an instance of the {@link HttpClientOptions}
* class (a subclass of the {@link ClientOptions} base class). The HttpClientOptions subclass provides more
* configuration options suitable for HTTP clients, which is applicable for any class that implements this HttpTrait
* interface.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param clientOptions A configured instance of {@link HttpClientOptions}.
* @see HttpClientOptions
* @return the updated ConfigurationClientBuilder object
*/
@Override
public ConfigurationClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential to use when authenticating HTTP requests. Also, sets the {@link
* for this ConfigurationClientBuilder.
*
* @param connectionString Connection string in the format "endpoint={endpoint_value};id={id_value};
* secret={secret_value}"
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code connectionString} is null.
* @throws IllegalArgumentException If {@code connectionString} is an empty string, the {@code connectionString}
* secret is invalid, or the HMAC-SHA256 MAC algorithm cannot be instantiated.
*/
@Override
public ConfigurationClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
if (connectionString.isEmpty()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'connectionString' cannot be an empty string."));
}
try {
this.credential = new ConfigurationClientCredentials(connectionString);
} catch (InvalidKeyException err) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"The secret contained within the connection string is invalid and cannot instantiate the HMAC-SHA256"
+ " algorithm.", err));
} catch (NoSuchAlgorithmException err) {
throw logger.logExceptionAsError(
new IllegalArgumentException("HMAC-SHA256 MAC algorithm cannot be instantiated.", err));
}
this.endpoint = credential.getBaseUri();
return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
* <a href="https:
* documentation for more details on proper usage of the {@link TokenCredential} type.
*
* @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service.
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code credential} is null.
*/
@Override
public ConfigurationClientBuilder credential(TokenCredential tokenCredential) {
Objects.requireNonNull(tokenCredential);
this.tokenCredential = tokenCredential;
return this;
}
/**
* Sets the {@link HttpLogOptions logging configuration} to use when sending and receiving requests to and from
* the service. If a {@code logLevel} is not provided, default value of {@link HttpLogDetailLevel
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to
* and from the service.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder httpLogOptions(HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param policy A {@link HttpPipelinePolicy pipeline policy}.
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code policy} is null.
*/
@Override
public ConfigurationClientBuilder addPolicy(HttpPipelinePolicy policy) {
Objects.requireNonNull(policy, "'policy' cannot be null.");
if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) {
perCallPolicies.add(policy);
} else {
perRetryPolicies.add(policy);
}
return this;
}
/**
* Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param client The {@link HttpClient} to use for requests.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder httpClient(HttpClient client) {
if (this.httpClient != null && client == null) {
logger.info("HttpClient is being set to 'null' when it was previously configured.");
}
this.httpClient = client;
return this;
}
/**
* Sets the {@link HttpPipeline} to use for the service client.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
* <p>
* The {@link
*
* @param pipeline {@link HttpPipeline} to use for sending service requests and receiving responses.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder pipeline(HttpPipeline pipeline) {
if (this.pipeline != null && pipeline == null) {
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.pipeline = pipeline;
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the {@link HttpPipelinePolicy} that is used to retry requests.
* <p>
* The default retry policy will be used if not provided {@link ConfigurationClientBuilder
* build {@link ConfigurationAsyncClient} or {@link ConfigurationClient}.
* <p>
* Setting this is mutually exclusive with using {@link
*
* @param retryPolicy The {@link HttpPipelinePolicy} that will be used to retry requests. For example,
* {@link RetryPolicy} can be used to retry requests.
*
* @return The updated ConfigurationClientBuilder object.
*/
public ConfigurationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
* Sets the {@link RetryOptions} for all the requests made through the client.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
* <p>
* Setting this is mutually exclusive with using {@link
*
* @param retryOptions The {@link RetryOptions} to use for all the requests made through the client.
* @return The updated {@link ConfigurationClientBuilder} object.
*/
@Override
public ConfigurationClientBuilder retryOptions(RetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the {@link ConfigurationServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link ConfigurationServiceVersion} of the service to be used when making requests.
* @return The updated ConfigurationClientBuilder object.
*/
public ConfigurationClientBuilder serviceVersion(ConfigurationServiceVersion version) {
this.version = version;
return this;
}
private String getBuildEndpoint() {
if (endpoint != null) {
return endpoint;
} else if (credential != null) {
return credential.getBaseUri();
} else {
return null;
}
}
} | class ConfigurationClientBuilder implements
TokenCredentialTrait<ConfigurationClientBuilder>,
ConnectionStringTrait<ConfigurationClientBuilder>,
HttpTrait<ConfigurationClientBuilder>,
ConfigurationTrait<ConfigurationClientBuilder>,
EndpointTrait<ConfigurationClientBuilder> {
/**
* The serializer to serialize an object into a string.
*/
private static final SerializerAdapter SERIALIZER_ADAPTER;
private static final String CLIENT_NAME;
private static final String CLIENT_VERSION;
private static final HttpPipelinePolicy ADD_HEADERS_POLICY;
static {
Map<String, String> properties = CoreUtils.getProperties("azure-data-appconfiguration.properties");
CLIENT_NAME = properties.getOrDefault("name", "UnknownName");
CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion");
ADD_HEADERS_POLICY = new AddHeadersPolicy(new HttpHeaders()
.set("x-ms-return-client-request-id", "true")
.set("Content-Type", "application/json")
.set("Accept", "application/vnd.microsoft.azconfig.kv+json"));
JacksonAdapter jacksonAdapter = new JacksonAdapter();
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonSerializer.getModule());
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonDeserializer.getModule());
SERIALIZER_ADAPTER = jacksonAdapter;
}
private final ClientLogger logger = new ClientLogger(ConfigurationClientBuilder.class);
private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>();
private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private ConfigurationClientCredentials credential;
private TokenCredential tokenCredential;
private String endpoint;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private HttpPipelinePolicy retryPolicy;
private RetryOptions retryOptions;
private Configuration configuration;
private ConfigurationServiceVersion version;
/**
* Constructs a new builder used to configure and build {@link ConfigurationClient ConfigurationClients} and {@link
* ConfigurationAsyncClient ConfigurationAsyncClients}.
*/
public ConfigurationClientBuilder() {
httpLogOptions = new HttpLogOptions();
}
/**
* Creates a {@link ConfigurationClient} based on options set in the Builder. Every time {@code buildClient()} is
* called a new instance of {@link ConfigurationClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationClient client}. All other builder settings are ignored.</p>
*
* @return A ConfigurationClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
/**
* Creates a {@link ConfigurationAsyncClient} based on options set in the Builder. Every time {@code
* buildAsyncClient()} is called a new instance of {@link ConfigurationAsyncClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationAsyncClient client}. All other builder settings are
* ignored.
*
* @return A ConfigurationAsyncClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
public ConfigurationAsyncClient buildAsyncClient() {
final SyncTokenPolicy syncTokenPolicy = new SyncTokenPolicy();
return new ConfigurationAsyncClient(buildInnerClient(syncTokenPolicy), syncTokenPolicy);
}
/**
* Builds an instance of ConfigurationClientImpl with the provided parameters.
*
* @return an instance of ConfigurationClientImpl.
*/
private ConfigurationClientImpl buildInnerClient(SyncTokenPolicy syncTokenPolicy) {
ConfigurationServiceVersion serviceVersion = (version != null)
? version
: ConfigurationServiceVersion.getLatest();
return new ConfigurationClientImpl(
pipeline == null ? createHttpPipeline(syncTokenPolicy) : pipeline,
SERIALIZER_ADAPTER,
endpoint,
serviceVersion.getVersion());
}
private HttpPipeline createHttpPipeline(SyncTokenPolicy syncTokenPolicy) {
Configuration buildConfiguration = (configuration == null)
? Configuration.getGlobalConfiguration()
: configuration;
String buildEndpoint = endpoint;
if (tokenCredential == null) {
buildEndpoint = getBuildEndpoint();
}
Objects.requireNonNull(buildEndpoint, "'Endpoint' is required and can not be null.");
final List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy(
getApplicationId(clientOptions, httpLogOptions), CLIENT_NAME, CLIENT_VERSION, buildConfiguration));
policies.add(new RequestIdPolicy());
policies.add(new AddHeadersFromContextPolicy());
policies.add(ADD_HEADERS_POLICY);
policies.addAll(perCallPolicies);
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions,
new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS)));
policies.add(new AddDatePolicy());
if (tokenCredential != null) {
policies.add(
new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", buildEndpoint)));
} else if (credential != null) {
policies.add(new ConfigurationCredentialsPolicy(credential));
} else {
throw logger.logExceptionAsError(
new IllegalArgumentException("Missing credential information while building a client."));
}
policies.add(syncTokenPolicy);
policies.addAll(perRetryPolicies);
if (clientOptions != null) {
List<HttpHeader> httpHeaderList = new ArrayList<>();
clientOptions.getHeaders().forEach(
header -> httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
}
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.httpClient(httpClient)
.build();
}
/**
* Sets the service endpoint for the Azure App Configuration instance.
*
* @param endpoint The URL of the Azure App Configuration instance.
* @return The updated ConfigurationClientBuilder object.
* @throws IllegalArgumentException If {@code endpoint} is null, or it cannot be parsed into a valid URL.
*/
@Override
public ConfigurationClientBuilder endpoint(String endpoint) {
try {
new URL(endpoint);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL", ex));
}
this.endpoint = endpoint;
return this;
}
/**
* Allows for setting common properties such as application ID, headers, proxy configuration, etc. Note that it is
* recommended that this method be called with an instance of the {@link HttpClientOptions}
* class (a subclass of the {@link ClientOptions} base class). The HttpClientOptions subclass provides more
* configuration options suitable for HTTP clients, which is applicable for any class that implements this HttpTrait
* interface.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param clientOptions A configured instance of {@link HttpClientOptions}.
* @see HttpClientOptions
* @return the updated ConfigurationClientBuilder object
*/
@Override
public ConfigurationClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential to use when authenticating HTTP requests. Also, sets the {@link
* for this ConfigurationClientBuilder.
*
* @param connectionString Connection string in the format "endpoint={endpoint_value};id={id_value};
* secret={secret_value}"
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code connectionString} is null.
* @throws IllegalArgumentException If {@code connectionString} is an empty string, the {@code connectionString}
* secret is invalid, or the HMAC-SHA256 MAC algorithm cannot be instantiated.
*/
@Override
public ConfigurationClientBuilder connectionString(String connectionString) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
if (connectionString.isEmpty()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("'connectionString' cannot be an empty string."));
}
try {
this.credential = new ConfigurationClientCredentials(connectionString);
} catch (InvalidKeyException err) {
throw logger.logExceptionAsError(new IllegalArgumentException(
"The secret contained within the connection string is invalid and cannot instantiate the HMAC-SHA256"
+ " algorithm.", err));
} catch (NoSuchAlgorithmException err) {
throw logger.logExceptionAsError(
new IllegalArgumentException("HMAC-SHA256 MAC algorithm cannot be instantiated.", err));
}
this.endpoint = credential.getBaseUri();
return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
* <a href="https:
* documentation for more details on proper usage of the {@link TokenCredential} type.
*
* @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service.
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code credential} is null.
*/
@Override
public ConfigurationClientBuilder credential(TokenCredential tokenCredential) {
Objects.requireNonNull(tokenCredential);
this.tokenCredential = tokenCredential;
return this;
}
/**
 * Sets the {@link HttpLogOptions logging configuration} used when sending and receiving requests to and from
 * the service. When no {@code logLevel} is provided a default {@link HttpLogDetailLevel} is used.
 *
 * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
 * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
 * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
 * based on the settings provided to this trait.</p>
 *
 * @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to
 * and from the service.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder httpLogOptions(HttpLogOptions logOptions) {
    // Store the new logging configuration; a null value is accepted and simply clears it.
    this.httpLogOptions = logOptions;
    return this;
}
/**
 * Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent.
 *
 * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
 * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
 * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
 * based on the settings provided to this trait.</p>
 *
 * @param policy A {@link HttpPipelinePolicy pipeline policy}.
 * @return The updated ConfigurationClientBuilder object.
 * @throws NullPointerException If {@code policy} is null.
 */
@Override
public ConfigurationClientBuilder addPolicy(HttpPipelinePolicy policy) {
    Objects.requireNonNull(policy, "'policy' cannot be null.");
    // Route the policy into the per-call or per-retry bucket based on its declared pipeline position.
    final List<HttpPipelinePolicy> bucket =
        (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) ? perCallPolicies : perRetryPolicies;
    bucket.add(policy);
    return this;
}
/**
 * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
 *
 * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
 * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
 * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
 * based on the settings provided to this trait.</p>
 *
 * @param client The {@link HttpClient} to use for requests.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder httpClient(HttpClient client) {
    // Log when a previously configured client is being cleared, since that is usually unintentional.
    final boolean clearingExisting = (client == null) && (this.httpClient != null);
    if (clearingExisting) {
        logger.info("HttpClient is being set to 'null' when it was previously configured.");
    }
    this.httpClient = client;
    return this;
}
/**
 * Sets the {@link HttpPipeline} to use for the service client.
 *
 * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
 * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
 * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
 * based on the settings provided to this trait.</p>
 * <p>
 * The endpoint is not ignored when a pipeline is set; it is still required to build a client.
 *
 * @param pipeline {@link HttpPipeline} to use for sending service requests and receiving responses.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder pipeline(HttpPipeline pipeline) {
    // Log when a previously configured pipeline is being cleared, since that is usually unintentional.
    final boolean clearingExisting = (pipeline == null) && (this.pipeline != null);
    if (clearingExisting) {
        logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
    }
    this.pipeline = pipeline;
    return this;
}
/**
 * Sets the configuration store that is used during construction of the service client.
 * <p>
 * The default configuration store is a clone of the global configuration store
 * ({@link Configuration#getGlobalConfiguration()}).
 *
 * @param configuration The configuration store used to read environment-driven settings at build time.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder configuration(Configuration configuration) {
    // A null value means the builder falls back to Configuration.getGlobalConfiguration() at build time.
    this.configuration = configuration;
    return this;
}
/**
 * Sets the {@link HttpPipelinePolicy} that is used to retry requests.
 * <p>
 * The default retry policy will be used if not provided when building the
 * {@link ConfigurationAsyncClient} or {@link ConfigurationClient}.
 * <p>
 * Setting this is mutually exclusive with using {@link #retryOptions(RetryOptions)}.
 *
 * @param retryPolicy The {@link HttpPipelinePolicy} that will be used to retry requests. For example,
 * {@link RetryPolicy} can be used to retry requests.
 *
 * @return The updated ConfigurationClientBuilder object.
 */
public ConfigurationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) {
    this.retryPolicy = retryPolicy;
    return this;
}
/**
 * Sets the {@link RetryOptions} for all the requests made through the client.
 *
 * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
 * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
 * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
 * based on the settings provided to this trait.</p>
 * <p>
 * Setting this is mutually exclusive with using {@link #retryPolicy(HttpPipelinePolicy)}.
 *
 * @param retryOptions The {@link RetryOptions} to use for all the requests made through the client.
 * @return The updated {@link ConfigurationClientBuilder} object.
 */
@Override
public ConfigurationClientBuilder retryOptions(RetryOptions retryOptions) {
    this.retryOptions = retryOptions;
    return this;
}
/**
 * Sets the {@link ConfigurationServiceVersion} that is used when making API requests.
 * <p>
 * If a service version is not provided, the service version that will be used will be the latest known service
 * version based on the version of the client library being used. If no service version is specified, updating to a
 * newer version the client library will have the result of potentially moving to a newer service version.
 *
 * @param version {@link ConfigurationServiceVersion} of the service to be used when making requests.
 * @return The updated ConfigurationClientBuilder object.
 */
public ConfigurationClientBuilder serviceVersion(ConfigurationServiceVersion version) {
    // A null value is allowed; the latest service version is resolved at build time (see buildInnerClient).
    this.version = version;
    return this;
}
/**
 * Resolves the endpoint to use when building a client.
 *
 * @return the explicitly configured endpoint; otherwise the endpoint embedded in the connection-string
 * credential; otherwise {@code null}.
 */
private String getBuildEndpoint() {
    // Prefer an explicitly configured endpoint over one derived from the connection string.
    if (endpoint != null) {
        return endpoint;
    }
    return (credential == null) ? null : credential.getBaseUri();
}
} |
SyncTokenPolicy needs to be the same object. Updated. | public ConfigurationClient buildClient() {
return new ConfigurationClient(buildInnerClient(), new SyncTokenPolicy());
} | return new ConfigurationClient(buildInnerClient(), new SyncTokenPolicy()); | public ConfigurationClient buildClient() {
final SyncTokenPolicy syncTokenPolicy = new SyncTokenPolicy();
return new ConfigurationClient(buildInnerClient(syncTokenPolicy), syncTokenPolicy);
} | class ConfigurationClientBuilder implements
TokenCredentialTrait<ConfigurationClientBuilder>,
ConnectionStringTrait<ConfigurationClientBuilder>,
HttpTrait<ConfigurationClientBuilder>,
ConfigurationTrait<ConfigurationClientBuilder>,
EndpointTrait<ConfigurationClientBuilder> {
private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS);
/**
* The serializer to serialize an object into a string.
*/
private static final SerializerAdapter SERIALIZER_ADAPTER;
private static final String CLIENT_NAME;
private static final String CLIENT_VERSION;
private static final HttpPipelinePolicy ADD_HEADERS_POLICY;
static {
Map<String, String> properties = CoreUtils.getProperties("azure-data-appconfiguration.properties");
CLIENT_NAME = properties.getOrDefault("name", "UnknownName");
CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion");
ADD_HEADERS_POLICY = new AddHeadersPolicy(new HttpHeaders()
.set("x-ms-return-client-request-id", "true")
.set("Content-Type", "application/json")
.set("Accept", "application/vnd.microsoft.azconfig.kv+json"));
JacksonAdapter jacksonAdapter = new JacksonAdapter();
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonSerializer.getModule());
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonDeserializer.getModule());
SERIALIZER_ADAPTER = jacksonAdapter;
}
private final ClientLogger logger = new ClientLogger(ConfigurationClientBuilder.class);
private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>();
private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private ConfigurationClientCredentials credential;
private TokenCredential tokenCredential;
private String endpoint;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private HttpPipelinePolicy retryPolicy;
private RetryOptions retryOptions;
private Configuration configuration;
private ConfigurationServiceVersion version;
/**
* Constructs a new builder used to configure and build {@link ConfigurationClient ConfigurationClients} and {@link
* ConfigurationAsyncClient ConfigurationAsyncClients}.
*/
public ConfigurationClientBuilder() {
httpLogOptions = new HttpLogOptions();
}
/**
* Creates a {@link ConfigurationClient} based on options set in the Builder. Every time {@code buildClient()} is
* called a new instance of {@link ConfigurationClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationClient client}. All other builder settings are ignored.</p>
*
* @return A ConfigurationClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
/**
 * Creates a {@link ConfigurationAsyncClient} based on options set in the builder. Every time {@code
 * buildAsyncClient()} is called a new instance of {@link ConfigurationAsyncClient} is created.
 * <p>
 * If an {@link HttpPipeline} is set, then only the pipeline and endpoint are used to create the
 * {@link ConfigurationAsyncClient client}; all other builder settings are ignored.
 *
 * @return A ConfigurationAsyncClient with the options set from the builder.
 * @throws NullPointerException If {@code endpoint} has not been set and cannot be derived from a
 * connection-string credential.
 * @throws IllegalArgumentException If no credential information was configured on the builder.
 */
public ConfigurationAsyncClient buildAsyncClient() {
    // The SyncTokenPolicy handed to the client must be the SAME instance as the one installed in the
    // HTTP pipeline; presumably the client updates this policy with sync-tokens returned by the service
    // so later requests observe earlier writes. Creating two independent instances (the previous
    // behavior) silently broke that sharing.
    final SyncTokenPolicy syncTokenPolicy = new SyncTokenPolicy();
    return new ConfigurationAsyncClient(buildInnerClient(syncTokenPolicy), syncTokenPolicy);
}
/**
 * Builds an instance of ConfigurationClientImpl using a fresh, unshared {@link SyncTokenPolicy}.
 * <p>
 * Retained for backward compatibility with existing callers; prefer
 * {@link #buildInnerClient(SyncTokenPolicy)} so the same policy instance can also be handed to the
 * client that consumes the pipeline.
 *
 * @return an instance of ConfigurationClientImpl.
 */
private ConfigurationClientImpl buildInnerClient() {
    return buildInnerClient(new SyncTokenPolicy());
}
/**
 * Builds an instance of ConfigurationClientImpl with the provided parameters.
 *
 * @param syncTokenPolicy the sync-token policy to install into a newly created pipeline; ignored when an
 * explicit {@link HttpPipeline} was configured on this builder.
 * @return an instance of ConfigurationClientImpl.
 */
private ConfigurationClientImpl buildInnerClient(SyncTokenPolicy syncTokenPolicy) {
    // Resolve the service version lazily so that "not set" always means "latest known".
    ConfigurationServiceVersion serviceVersion = (version != null)
        ? version
        : ConfigurationServiceVersion.getLatest();
    return new ConfigurationClientImpl(
        pipeline == null ? createHttpPipeline(syncTokenPolicy) : pipeline,
        SERIALIZER_ADAPTER,
        endpoint,
        serviceVersion.getVersion());
}
/**
 * Assembles the HTTP pipeline from the builder's configuration.
 *
 * @param syncTokenPolicy the shared sync-token policy to install after the authentication policy.
 * @return the assembled {@link HttpPipeline}.
 * @throws NullPointerException If no endpoint could be resolved.
 * @throws IllegalArgumentException If neither a token credential nor a connection-string credential is set.
 */
private HttpPipeline createHttpPipeline(SyncTokenPolicy syncTokenPolicy) {
    Configuration buildConfiguration = (configuration == null)
        ? Configuration.getGlobalConfiguration()
        : configuration;
    // With a connection-string credential the endpoint may come from the credential itself.
    String buildEndpoint = endpoint;
    if (tokenCredential == null) {
        buildEndpoint = getBuildEndpoint();
    }
    Objects.requireNonNull(buildEndpoint, "'Endpoint' is required and can not be null.");
    final List<HttpPipelinePolicy> policies = new ArrayList<>();
    policies.add(new UserAgentPolicy(
        getApplicationId(clientOptions, httpLogOptions), CLIENT_NAME, CLIENT_VERSION, buildConfiguration));
    policies.add(new RequestIdPolicy());
    policies.add(new AddHeadersFromContextPolicy());
    policies.add(ADD_HEADERS_POLICY);
    policies.addAll(perCallPolicies);
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    // retryPolicy and retryOptions are mutually exclusive; the helper validates and picks one.
    policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions, DEFAULT_RETRY_POLICY));
    policies.add(new AddDatePolicy());
    if (tokenCredential != null) {
        policies.add(
            new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", buildEndpoint)));
    } else if (credential != null) {
        policies.add(new ConfigurationCredentialsPolicy(credential));
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    // Install the caller-supplied policy so the client and the pipeline share one sync-token store.
    policies.add(syncTokenPolicy);
    policies.addAll(perRetryPolicies);
    if (clientOptions != null) {
        List<HttpHeader> httpHeaderList = new ArrayList<>();
        clientOptions.getHeaders().forEach(
            header -> httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
        policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
    }
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    policies.add(new HttpLoggingPolicy(httpLogOptions));
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/**
 * Sets the service endpoint for the Azure App Configuration instance.
 *
 * @param endpoint The URL of the Azure App Configuration instance.
 * @return The updated ConfigurationClientBuilder object.
 * @throws IllegalArgumentException If {@code endpoint} is null, or it cannot be parsed into a valid URL.
 */
@Override
public ConfigurationClientBuilder endpoint(String endpoint) {
    // Validate eagerly so a malformed endpoint fails at configuration time rather than on first request.
    try {
        new URL(endpoint);
    } catch (MalformedURLException ex) {
        throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL", ex));
    }
    this.endpoint = endpoint;
    return this;
}
/**
* Allows for setting common properties such as application ID, headers, proxy configuration, etc. Note that it is
* recommended that this method be called with an instance of the {@link HttpClientOptions}
* class (a subclass of the {@link ClientOptions} base class). The HttpClientOptions subclass provides more
* configuration options suitable for HTTP clients, which is applicable for any class that implements this HttpTrait
* interface.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param clientOptions A configured instance of {@link HttpClientOptions}.
* @see HttpClientOptions
* @return the updated ConfigurationClientBuilder object
*/
@Override
public ConfigurationClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
 * Sets the credential to use when authenticating HTTP requests. Also sets the endpoint embedded in the
 * connection string for this ConfigurationClientBuilder.
 *
 * @param connectionString Connection string in the format "endpoint={endpoint_value};id={id_value};
 * secret={secret_value}"
 * @return The updated ConfigurationClientBuilder object.
 * @throws NullPointerException If {@code connectionString} is null.
 * @throws IllegalArgumentException If {@code connectionString} is an empty string, the {@code connectionString}
 * secret is invalid, or the HMAC-SHA256 MAC algorithm cannot be instantiated.
 */
@Override
public ConfigurationClientBuilder connectionString(String connectionString) {
    Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    if (connectionString.isEmpty()) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'connectionString' cannot be an empty string."));
    }
    // Parsing the connection string also initializes the HMAC-SHA256 signer; surface crypto failures
    // as IllegalArgumentException so callers see a configuration error rather than a checked exception.
    try {
        this.credential = new ConfigurationClientCredentials(connectionString);
    } catch (InvalidKeyException err) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "The secret contained within the connection string is invalid and cannot instantiate the HMAC-SHA256"
            + " algorithm.", err));
    } catch (NoSuchAlgorithmException err) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("HMAC-SHA256 MAC algorithm cannot be instantiated.", err));
    }
    // The endpoint embedded in the connection string becomes the builder's endpoint.
    this.endpoint = credential.getBaseUri();
    return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
* <a href="https:
* documentation for more details on proper usage of the {@link TokenCredential} type.
*
* @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service.
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code credential} is null.
*/
@Override
public ConfigurationClientBuilder credential(TokenCredential tokenCredential) {
Objects.requireNonNull(tokenCredential);
this.tokenCredential = tokenCredential;
return this;
}
/**
* Sets the {@link HttpLogOptions logging configuration} to use when sending and receiving requests to and from
* the service. If a {@code logLevel} is not provided, default value of {@link HttpLogDetailLevel
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to
* and from the service.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder httpLogOptions(HttpLogOptions logOptions) {
httpLogOptions = logOptions;
return this;
}
/**
* Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param policy A {@link HttpPipelinePolicy pipeline policy}.
* @return The updated ConfigurationClientBuilder object.
* @throws NullPointerException If {@code policy} is null.
*/
@Override
public ConfigurationClientBuilder addPolicy(HttpPipelinePolicy policy) {
Objects.requireNonNull(policy, "'policy' cannot be null.");
if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) {
perCallPolicies.add(policy);
} else {
perRetryPolicies.add(policy);
}
return this;
}
/**
* Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
*
* @param client The {@link HttpClient} to use for requests.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder httpClient(HttpClient client) {
if (this.httpClient != null && client == null) {
logger.info("HttpClient is being set to 'null' when it was previously configured.");
}
this.httpClient = client;
return this;
}
/**
* Sets the {@link HttpPipeline} to use for the service client.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
* <p>
* The {@link
*
* @param pipeline {@link HttpPipeline} to use for sending service requests and receiving responses.
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder pipeline(HttpPipeline pipeline) {
if (this.pipeline != null && pipeline == null) {
logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
}
this.pipeline = pipeline;
return this;
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* The default configuration store is a clone of the {@link Configuration
* configuration store}, use {@link Configuration
*
* @param configuration The configuration store used to
* @return The updated ConfigurationClientBuilder object.
*/
@Override
public ConfigurationClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets the {@link HttpPipelinePolicy} that is used to retry requests.
* <p>
* The default retry policy will be used if not provided {@link ConfigurationClientBuilder
* build {@link ConfigurationAsyncClient} or {@link ConfigurationClient}.
* <p>
* Setting this is mutually exclusive with using {@link
*
* @param retryPolicy The {@link HttpPipelinePolicy} that will be used to retry requests. For example,
* {@link RetryPolicy} can be used to retry requests.
*
* @return The updated ConfigurationClientBuilder object.
*/
public ConfigurationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) {
this.retryPolicy = retryPolicy;
return this;
}
/**
* Sets the {@link RetryOptions} for all the requests made through the client.
*
* <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In
* particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and
* they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally
* based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this
* trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the
* documentation of types that implement this trait to understand the full set of implications.</p>
* <p>
* Setting this is mutually exclusive with using {@link
*
* @param retryOptions The {@link RetryOptions} to use for all the requests made through the client.
* @return The updated {@link ConfigurationClientBuilder} object.
*/
@Override
public ConfigurationClientBuilder retryOptions(RetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the {@link ConfigurationServiceVersion} that is used when making API requests.
* <p>
* If a service version is not provided, the service version that will be used will be the latest known service
* version based on the version of the client library being used. If no service version is specified, updating to a
* newer version the client library will have the result of potentially moving to a newer service version.
*
* @param version {@link ConfigurationServiceVersion} of the service to be used when making requests.
* @return The updated ConfigurationClientBuilder object.
*/
public ConfigurationClientBuilder serviceVersion(ConfigurationServiceVersion version) {
this.version = version;
return this;
}
private String getBuildEndpoint() {
if (endpoint != null) {
return endpoint;
} else if (credential != null) {
return credential.getBaseUri();
} else {
return null;
}
}
} | class ConfigurationClientBuilder implements
TokenCredentialTrait<ConfigurationClientBuilder>,
ConnectionStringTrait<ConfigurationClientBuilder>,
HttpTrait<ConfigurationClientBuilder>,
ConfigurationTrait<ConfigurationClientBuilder>,
EndpointTrait<ConfigurationClientBuilder> {
/**
* The serializer to serialize an object into a string.
*/
private static final SerializerAdapter SERIALIZER_ADAPTER;
private static final String CLIENT_NAME;
private static final String CLIENT_VERSION;
private static final HttpPipelinePolicy ADD_HEADERS_POLICY;
static {
Map<String, String> properties = CoreUtils.getProperties("azure-data-appconfiguration.properties");
CLIENT_NAME = properties.getOrDefault("name", "UnknownName");
CLIENT_VERSION = properties.getOrDefault("version", "UnknownVersion");
ADD_HEADERS_POLICY = new AddHeadersPolicy(new HttpHeaders()
.set("x-ms-return-client-request-id", "true")
.set("Content-Type", "application/json")
.set("Accept", "application/vnd.microsoft.azconfig.kv+json"));
JacksonAdapter jacksonAdapter = new JacksonAdapter();
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonSerializer.getModule());
jacksonAdapter.serializer().registerModule(ConfigurationSettingJsonDeserializer.getModule());
SERIALIZER_ADAPTER = jacksonAdapter;
}
private final ClientLogger logger = new ClientLogger(ConfigurationClientBuilder.class);
private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>();
private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>();
private ClientOptions clientOptions;
private ConfigurationClientCredentials credential;
private TokenCredential tokenCredential;
private String endpoint;
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private HttpPipeline pipeline;
private HttpPipelinePolicy retryPolicy;
private RetryOptions retryOptions;
private Configuration configuration;
private ConfigurationServiceVersion version;
/**
* Constructs a new builder used to configure and build {@link ConfigurationClient ConfigurationClients} and {@link
* ConfigurationAsyncClient ConfigurationAsyncClients}.
*/
public ConfigurationClientBuilder() {
httpLogOptions = new HttpLogOptions();
}
/**
* Creates a {@link ConfigurationClient} based on options set in the Builder. Every time {@code buildClient()} is
* called a new instance of {@link ConfigurationClient} is created.
* <p>
* If {@link
* endpoint} are used to create the {@link ConfigurationClient client}. All other builder settings are ignored.</p>
*
* @return A ConfigurationClient with the options set from the builder.
* @throws NullPointerException If {@code endpoint} has not been set. This setting is automatically set when {@link
*
*
* @throws IllegalStateException If {@link
* @throws IllegalStateException If both {@link
* and {@link
*/
/**
 * Creates a {@link ConfigurationAsyncClient} based on options set in the Builder. Every time {@code
 * buildAsyncClient()} is called a new instance of {@link ConfigurationAsyncClient} is created.
 * <p>
 * If an {@link HttpPipeline} is set, then only the pipeline and endpoint are used to create the
 * {@link ConfigurationAsyncClient client}; all other builder settings are ignored.
 *
 * @return A ConfigurationAsyncClient with the options set from the builder.
 * @throws NullPointerException If {@code endpoint} has not been set and cannot be derived from a
 * connection-string credential.
 * @throws IllegalArgumentException If no credential information was configured on the builder.
 */
public ConfigurationAsyncClient buildAsyncClient() {
    // Share ONE SyncTokenPolicy instance between the client and the pipeline it uses; presumably the
    // client feeds sync-tokens returned by the service back into this policy so subsequent requests
    // observe previous writes (read-your-writes consistency).
    final SyncTokenPolicy syncTokenPolicy = new SyncTokenPolicy();
    return new ConfigurationAsyncClient(buildInnerClient(syncTokenPolicy), syncTokenPolicy);
}
/**
 * Builds an instance of ConfigurationClientImpl with the provided parameters.
 *
 * @param syncTokenPolicy the sync-token policy to install into a newly created pipeline; ignored when an
 * explicit {@link HttpPipeline} was configured on this builder.
 * @return an instance of ConfigurationClientImpl.
 */
private ConfigurationClientImpl buildInnerClient(SyncTokenPolicy syncTokenPolicy) {
    // Resolve the service version lazily so that "not set" always means "latest known".
    ConfigurationServiceVersion serviceVersion = (version != null)
        ? version
        : ConfigurationServiceVersion.getLatest();
    return new ConfigurationClientImpl(
        pipeline == null ? createHttpPipeline(syncTokenPolicy) : pipeline,
        SERIALIZER_ADAPTER,
        endpoint,
        serviceVersion.getVersion());
}
/**
 * Assembles the HTTP pipeline from the builder's configuration.
 *
 * @param syncTokenPolicy the shared sync-token policy to install after the authentication policy.
 * @return the assembled {@link HttpPipeline}.
 * @throws NullPointerException If no endpoint could be resolved.
 * @throws IllegalArgumentException If neither a token credential nor a connection-string credential is set.
 */
private HttpPipeline createHttpPipeline(SyncTokenPolicy syncTokenPolicy) {
    Configuration buildConfiguration = (configuration == null)
        ? Configuration.getGlobalConfiguration()
        : configuration;
    // With a connection-string credential the endpoint may come from the credential itself.
    String buildEndpoint = endpoint;
    if (tokenCredential == null) {
        buildEndpoint = getBuildEndpoint();
    }
    Objects.requireNonNull(buildEndpoint, "'Endpoint' is required and can not be null.");
    final List<HttpPipelinePolicy> policies = new ArrayList<>();
    policies.add(new UserAgentPolicy(
        getApplicationId(clientOptions, httpLogOptions), CLIENT_NAME, CLIENT_VERSION, buildConfiguration));
    policies.add(new RequestIdPolicy());
    policies.add(new AddHeadersFromContextPolicy());
    policies.add(ADD_HEADERS_POLICY);
    policies.addAll(perCallPolicies);
    HttpPolicyProviders.addBeforeRetryPolicies(policies);
    // retryPolicy and retryOptions are mutually exclusive; the helper validates and picks one.
    policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions,
        new RetryPolicy("retry-after-ms", ChronoUnit.MILLIS)));
    policies.add(new AddDatePolicy());
    if (tokenCredential != null) {
        policies.add(
            new BearerTokenAuthenticationPolicy(tokenCredential, String.format("%s/.default", buildEndpoint)));
    } else if (credential != null) {
        policies.add(new ConfigurationCredentialsPolicy(credential));
    } else {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("Missing credential information while building a client."));
    }
    // Install the caller-supplied policy so the client and the pipeline share one sync-token store.
    policies.add(syncTokenPolicy);
    policies.addAll(perRetryPolicies);
    if (clientOptions != null) {
        List<HttpHeader> httpHeaderList = new ArrayList<>();
        clientOptions.getHeaders().forEach(
            header -> httpHeaderList.add(new HttpHeader(header.getName(), header.getValue())));
        policies.add(new AddHeadersPolicy(new HttpHeaders(httpHeaderList)));
    }
    HttpPolicyProviders.addAfterRetryPolicies(policies);
    policies.add(new HttpLoggingPolicy(httpLogOptions));
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .httpClient(httpClient)
        .build();
}
/**
* Sets the service endpoint for the Azure App Configuration instance.
*
* @param endpoint The URL of the Azure App Configuration instance.
* @return The updated ConfigurationClientBuilder object.
* @throws IllegalArgumentException If {@code endpoint} is null, or it cannot be parsed into a valid URL.
*/
@Override
public ConfigurationClientBuilder endpoint(String endpoint) {
try {
new URL(endpoint);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL", ex));
}
this.endpoint = endpoint;
return this;
}
/**
 * Allows for setting common properties such as application ID, headers, and proxy configuration. It is
 * recommended to pass an instance of {@link HttpClientOptions} (a subclass of {@link ClientOptions}), as it
 * exposes additional HTTP-specific configuration options.
 *
 * <p><strong>Note:</strong> If an {@link HttpPipeline} is supplied to this builder it takes precedence over
 * all other HTTP-related settings, and this value is ignored when the pipeline is constructed internally.</p>
 *
 * @param clientOptions A configured instance of {@link HttpClientOptions}.
 * @see HttpClientOptions
 * @return the updated ConfigurationClientBuilder object
 */
@Override
public ConfigurationClientBuilder clientOptions(ClientOptions clientOptions) {
    this.clientOptions = clientOptions;
    return this;
}
/**
 * Sets the credential used to authenticate HTTP requests, and derives the service endpoint from the supplied
 * connection string.
 *
 * @param connectionString Connection string in the format "endpoint={endpoint_value};id={id_value};
 * secret={secret_value}"
 * @return The updated ConfigurationClientBuilder object.
 * @throws NullPointerException If {@code connectionString} is null.
 * @throws IllegalArgumentException If {@code connectionString} is an empty string, the {@code connectionString}
 * secret is invalid, or the HMAC-SHA256 MAC algorithm cannot be instantiated.
 */
@Override
public ConfigurationClientBuilder connectionString(String connectionString) {
    Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
    if (connectionString.isEmpty()) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'connectionString' cannot be an empty string."));
    }
    try {
        // Parsing the connection string also validates the embedded secret, which seeds the
        // HMAC-SHA256 request-signing credential.
        this.credential = new ConfigurationClientCredentials(connectionString);
    } catch (InvalidKeyException e) {
        throw logger.logExceptionAsError(new IllegalArgumentException(
            "The secret contained within the connection string is invalid and cannot instantiate the HMAC-SHA256"
                + " algorithm.", e));
    } catch (NoSuchAlgorithmException e) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("HMAC-SHA256 MAC algorithm cannot be instantiated.", e));
    }
    // The endpoint is embedded in the connection string; keep the field in sync with the credential.
    this.endpoint = credential.getBaseUri();
    return this;
}
/**
 * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
 * identity and authentication documentation for more details on proper usage of the {@link TokenCredential} type.
 *
 * @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service.
 * @return The updated ConfigurationClientBuilder object.
 * @throws NullPointerException If {@code tokenCredential} is null.
 */
@Override
public ConfigurationClientBuilder credential(TokenCredential tokenCredential) {
    // Supply a message for parity with the other null checks in this builder
    // (e.g. connectionString), so callers get an actionable NPE.
    Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null.");
    this.tokenCredential = tokenCredential;
    return this;
}
/**
 * Sets the {@link HttpLogOptions logging configuration} used when sending and receiving requests to and from
 * the service.
 *
 * <p><strong>Note:</strong> If an {@link HttpPipeline} is supplied to this builder it takes precedence over
 * all other HTTP-related settings, and this value is ignored when the pipeline is constructed internally.</p>
 *
 * @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to
 * and from the service.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder httpLogOptions(HttpLogOptions logOptions) {
    httpLogOptions = logOptions;
    return this;
}
/**
 * Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent.
 *
 * <p><strong>Note:</strong> If an {@link HttpPipeline} is supplied to this builder it takes precedence over
 * all other HTTP-related settings, and this value is ignored when the pipeline is constructed internally.</p>
 *
 * @param policy A {@link HttpPipelinePolicy pipeline policy}.
 * @return The updated ConfigurationClientBuilder object.
 * @throws NullPointerException If {@code policy} is null.
 */
@Override
public ConfigurationClientBuilder addPolicy(HttpPipelinePolicy policy) {
    Objects.requireNonNull(policy, "'policy' cannot be null.");
    // Policies declaring PER_CALL run once per request; everything else runs per retry attempt.
    if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) {
        perCallPolicies.add(policy);
        return this;
    }
    perRetryPolicies.add(policy);
    return this;
}
/**
 * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service.
 *
 * <p><strong>Note:</strong> If an {@link HttpPipeline} is supplied to this builder it takes precedence over
 * all other HTTP-related settings, and this value is ignored when the pipeline is constructed internally.</p>
 *
 * @param client The {@link HttpClient} to use for requests.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder httpClient(HttpClient client) {
    // Clearing a previously configured client is legal but unusual; log it for diagnosability.
    if (client == null && this.httpClient != null) {
        logger.info("HttpClient is being set to 'null' when it was previously configured.");
    }
    this.httpClient = client;
    return this;
}
/**
 * Sets the {@link HttpPipeline} to use for the service client.
 *
 * <p><strong>Note:</strong> When an {@link HttpPipeline} is supplied it takes precedence over every other
 * HTTP-related setting on this builder; those settings are ignored when building the client.</p>
 *
 * @param pipeline {@link HttpPipeline} to use for sending service requests and receiving responses.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder pipeline(HttpPipeline pipeline) {
    // Clearing a previously configured pipeline is legal but unusual; log it for diagnosability.
    if (pipeline == null && this.pipeline != null) {
        logger.info("HttpPipeline is being set to 'null' when it was previously configured.");
    }
    this.pipeline = pipeline;
    return this;
}
/**
 * Sets the configuration store that is used during construction of the service client.
 *
 * <p>By default a clone of the global configuration store is used; pass an explicit store to override
 * environment-derived settings such as proxies.</p>
 *
 * @param configuration The configuration store used when constructing the client.
 * @return The updated ConfigurationClientBuilder object.
 */
@Override
public ConfigurationClientBuilder configuration(Configuration configuration) {
    this.configuration = configuration;
    return this;
}
/**
 * Sets the {@link HttpPipelinePolicy} that is used to retry requests.
 *
 * <p>A default retry policy is used if none is provided when building the client. Setting this is mutually
 * exclusive with {@link ConfigurationClientBuilder#retryOptions(RetryOptions)}.</p>
 *
 * @param retryPolicy The {@link HttpPipelinePolicy} that will be used to retry requests. For example,
 * {@link RetryPolicy} can be used to retry requests.
 *
 * @return The updated ConfigurationClientBuilder object.
 */
public ConfigurationClientBuilder retryPolicy(HttpPipelinePolicy retryPolicy) {
    this.retryPolicy = retryPolicy;
    return this;
}
/**
 * Sets the {@link RetryOptions} for all the requests made through the client.
 *
 * <p><strong>Note:</strong> If an {@link HttpPipeline} is supplied to this builder it takes precedence over
 * all other HTTP-related settings, and this value is ignored when the pipeline is constructed internally.
 * Setting this is mutually exclusive with {@link ConfigurationClientBuilder#retryPolicy(HttpPipelinePolicy)}.</p>
 *
 * @param retryOptions The {@link RetryOptions} to use for all the requests made through the client.
 * @return The updated {@link ConfigurationClientBuilder} object.
 */
@Override
public ConfigurationClientBuilder retryOptions(RetryOptions retryOptions) {
    this.retryOptions = retryOptions;
    return this;
}
/**
 * Sets the {@link ConfigurationServiceVersion} that is used when making API requests.
 *
 * <p>If unset, the latest service version known to this library is used; upgrading the library may then move
 * the client to a newer service version.</p>
 *
 * @param version {@link ConfigurationServiceVersion} of the service to be used when making requests.
 * @return The updated ConfigurationClientBuilder object.
 */
public ConfigurationClientBuilder serviceVersion(ConfigurationServiceVersion version) {
    this.version = version;
    return this;
}
// Resolves the endpoint used at build time: an explicitly configured endpoint wins,
// otherwise the endpoint embedded in the connection-string credential (if any), else null.
private String getBuildEndpoint() {
    if (endpoint != null) {
        return endpoint;
    }
    return (credential != null) ? credential.getBaseUri() : null;
}
} |
Do we want to throw here or put the OutputStream into append mode? | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
AppendBlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new AppendBlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
} else {
create(true);
}
return getBlobOutputStream(requestConditions);
} | if (exists()) { | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
AppendBlobRequestConditions requestConditions = null;
if (!overwrite) {
requestConditions = new AppendBlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
} else {
create(true);
}
return getBlobOutputStream(requestConditions);
} | class AppendBlobClient extends BlobClientBase {
private static final ClientLogger LOGGER = new ClientLogger(AppendBlobAsyncClient.class);
private final AppendBlobAsyncClient appendBlobAsyncClient;
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = AppendBlobAsyncClient.MAX_APPEND_BLOCK_BYTES;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = AppendBlobAsyncClient.MAX_BLOCKS;
/**
 * Package-private constructor for use by {@link BlobClientBuilder}.
 *
 * @param appendBlobAsyncClient the async append blob client that backs this synchronous client
 */
AppendBlobClient(AppendBlobAsyncClient appendBlobAsyncClient) {
    super(appendBlobAsyncClient);
    // Keep a typed reference so append-blob-specific calls avoid repeated downcasts.
    this.appendBlobAsyncClient = appendBlobAsyncClient;
}
/**
* Creates a new {@link AppendBlobClient} with the specified {@code encryptionScope}.
*
* @param encryptionScope the encryption scope for the blob, pass {@code null} to use no encryption scope.
* @return a {@link AppendBlobClient} with the specified {@code encryptionScope}.
*/
@Override
public AppendBlobClient getEncryptionScopeClient(String encryptionScope) {
    // Build a scoped async client and wrap it in a new synchronous facade.
    AppendBlobAsyncClient scopedAsyncClient = appendBlobAsyncClient.getEncryptionScopeAsyncClient(encryptionScope);
    return new AppendBlobClient(scopedAsyncClient);
}
/**
* Creates a new {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
*
* @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
* pass {@code null} to use no customer provided key.
* @return a {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
*/
@Override
public AppendBlobClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
    // Build a key-scoped async client and wrap it in a new synchronous facade.
    AppendBlobAsyncClient keyedAsyncClient =
        appendBlobAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey);
    return new AppendBlobClient(keyedAsyncClient);
}
/**
* Creates and opens an output stream to write data to the append blob. If the blob already exists on the service,
* new data will get appended to the existing blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
    // No access conditions: appends are attempted unconditionally. The cast pins the
    // AppendBlobRequestConditions overload explicitly.
    return getBlobOutputStream((AppendBlobRequestConditions) null);
}
/**
* Creates and opens an output stream to write data to the append blob.
* <p>
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @param overwrite Whether an existing blob should be deleted and recreated, should data exist on the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
/**
* Creates and opens an output stream to write data to the append blob. If the blob already exists on the service,
* it will be overwritten.
*
* @param requestConditions A {@link BlobRequestConditions} object that represents the access conditions for the
* blob.
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream(AppendBlobRequestConditions requestConditions) {
    // Delegates to the shared helper that wraps the async client in a blocking output stream.
    return BlobOutputStream.appendBlobOutputStream(appendBlobAsyncClient, requestConditions);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create -->
* <pre>
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create -->
*
* @return The information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create() {
    // Delegates with overwrite=false: creation fails if the blob already exists.
    return create(false);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create
* <pre>
* boolean overwrite = false; &
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return The information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create(boolean overwrite) {
    // When overwrite is disallowed, require that no blob exists at the destination by
    // sending If-None-Match: * (the request fails if any ETag, i.e. any blob, is present).
    BlobRequestConditions conditions = new BlobRequestConditions();
    if (!overwrite) {
        conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return createWithResponse(null, null, conditions, null, Context.NONE).getValue();
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* BlobRequestConditions requestConditions = new BlobRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Context context = new Context&
*
* System.out.printf&
* client.createWithResponse&
* .getLastModified&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Response} whose {@link Response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions, Duration timeout, Context context) {
    // Bundle the legacy parameter list into the options bag and delegate.
    AppendBlobCreateOptions options = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(options, timeout, context);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* Map<String, String> tags = Collections.singletonMap&
* BlobRequestConditions requestConditions = new BlobRequestConditions&
* .setIfUnmodifiedSince&
* Context context = new Context&
*
* System.out.printf&
* client.createWithResponse&
* .setTags&
* .getLastModified&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Response} whose {@link Response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    // Block on the async operation, honoring the optional timeout.
    Mono<Response<AppendBlobItem>> response = appendBlobAsyncClient.createWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
* <pre>
* client.createIfNotExists&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
*
* @return {@link AppendBlobItem} containing information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem createIfNotExists() {
    // Default options, no timeout, no context.
    AppendBlobCreateOptions defaultOptions = new AppendBlobCreateOptions();
    return createIfNotExistsWithResponse(defaultOptions, null, null).getValue();
}
/**
* Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* Map<String, String> tags = Collections.singletonMap&
* Context context = new Context&
*
* Response<AppendBlobItem> response = client.createIfNotExistsWithResponse&
* .setHeaders&
* if &
* System.out.println&
* &
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A reactive response {@link Response} signaling completion, whose {@link Response
* contains the {@link AppendBlobItem} containing information about the append blob. If {@link Response}'s status
* code is 201, a new append blob was successfully created. If status code is 409, an append blob already existed at
* this location.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createIfNotExistsWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    // Block on the async operation, honoring the optional timeout.
    Mono<Response<AppendBlobItem>> response =
        appendBlobAsyncClient.createIfNotExistsWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlock
* <pre>
* System.out.printf&
* client.appendBlock&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlock
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider using {@link
* Alternatively, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark support.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return The information of the append blob operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlock(InputStream data, long length) {
    // Append with no MD5 check, no access conditions, and no timeout.
    return appendBlockWithResponse(data, length, null, null, null, Context.NONE).getValue();
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse
* <pre>
* byte[] md5 = MessageDigest.getInstance&
* AppendBlobRequestConditions requestConditions = new AppendBlobRequestConditions&
* .setAppendPosition&
* .setMaxSize&
* Context context = new Context&
*
* System.out.printf&
* client.appendBlockWithResponse&
* .getValue&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider using {@link
* Alternatively, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark support.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Response} whose {@link Response
* @throws UnexpectedLengthException when the length of data does not match the input {@code length}.
* @throws NullPointerException if the input data is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockWithResponse(InputStream data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions, Duration timeout, Context context) {
    Objects.requireNonNull(data, "'data' cannot be null.");
    // Chunk the (markable, hence replayable) stream into buffers no larger than the
    // service's maximum append-block size.
    Flux<ByteBuffer> buffers = Utility.convertStreamToByteBuffer(data, length, MAX_APPEND_BLOCK_BYTES, true);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockWithResponse(buffers, length, contentMd5, appendBlobRequestConditions,
            context),
        timeout);
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl
* <pre>
* System.out.printf&
* client.appendBlockFromUrl&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return The information of the append blob operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    // No MD5 validation and no request conditions on either the source or destination.
    return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null, null, Context.NONE).getValue();
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse
* <pre>
* AppendBlobRequestConditions appendBlobRequestConditions = new AppendBlobRequestConditions&
* .setAppendPosition&
* .setMaxSize&
*
* BlobRequestConditions modifiedRequestConditions = new BlobRequestConditions&
* .setIfUnmodifiedSince&
*
* Context context = new Context&
*
* System.out.printf&
* client.appendBlockFromUrlWithResponse&
* appendBlobRequestConditions, modifiedRequestConditions, timeout,
* context&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMd5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return The information of the append blob operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMd5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions, Duration timeout, Context context) {
    // Bundle the legacy parameter list into the options bag, then block on the async call.
    AppendBlobAppendBlockFromUrlOptions options = new AppendBlobAppendBlockFromUrlOptions(sourceUrl)
        .setSourceRange(sourceRange)
        .setSourceContentMd5(sourceContentMd5)
        .setDestinationRequestConditions(destRequestConditions)
        .setSourceRequestConditions(sourceRequestConditions);
    Mono<Response<AppendBlobItem>> response =
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse
* <pre>
* AppendBlobRequestConditions appendBlobRequestConditions = new AppendBlobRequestConditions&
* .setAppendPosition&
* .setMaxSize&
*
* BlobRequestConditions modifiedRequestConditions = new BlobRequestConditions&
* .setIfUnmodifiedSince&
*
* Context context = new Context&
*
* System.out.printf&
* client.appendBlockFromUrlWithResponse&
* .setSourceRange&
* .setDestinationRequestConditions&
* .setSourceRequestConditions&
* context&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse
*
* @param options options for the operation
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return The information of the append blob operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(AppendBlobAppendBlockFromUrlOptions options, Duration timeout,
    Context context) {
    // Block on the async operation, honoring the optional timeout.
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context), timeout);
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.seal -->
* <pre>
* client.seal&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.seal -->
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void seal() {
    // Seal with default options, no timeout; any subsequent append will fail.
    sealWithResponse(new AppendBlobSealOptions(), null, Context.NONE);
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse
* <pre>
* AppendBlobRequestConditions requestConditions = new AppendBlobRequestConditions&
* .setIfUnmodifiedSince&
* Context context = new Context&
*
* client.sealWithResponse&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> sealWithResponse(AppendBlobSealOptions options, Duration timeout, Context context) {
    // Block on the async operation, honoring the optional timeout. Qualify the helper with
    // StorageImplUtils for consistency with every other method in this class, which does not
    // rely on a static import of blockWithOptionalTimeout.
    Mono<Response<Void>> response = appendBlobAsyncClient.sealWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
} | class AppendBlobClient extends BlobClientBase {
private final AppendBlobAsyncClient appendBlobAsyncClient;
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = AppendBlobAsyncClient.MAX_APPEND_BLOCK_BYTES;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = AppendBlobAsyncClient.MAX_BLOCKS;
/**
* Package-private constructor for use by {@link BlobClientBuilder}.
*
* @param appendBlobAsyncClient the async append blob client
*/
AppendBlobClient(AppendBlobAsyncClient appendBlobAsyncClient) {
    super(appendBlobAsyncClient);
    // Keep a typed reference so append-blob-specific calls avoid repeated downcasts.
    this.appendBlobAsyncClient = appendBlobAsyncClient;
}
/**
 * Creates a new {@link AppendBlobClient} bound to the given {@code encryptionScope}.
 *
 * @param encryptionScope the encryption scope for the blob, pass {@code null} to use no encryption scope.
 * @return a {@link AppendBlobClient} with the specified {@code encryptionScope}.
 */
@Override
public AppendBlobClient getEncryptionScopeClient(String encryptionScope) {
    AppendBlobAsyncClient scopedClient = appendBlobAsyncClient.getEncryptionScopeAsyncClient(encryptionScope);
    return new AppendBlobClient(scopedClient);
}
/**
 * Creates a new {@link AppendBlobClient} bound to the given {@code customerProvidedKey}.
 *
 * @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
 * pass {@code null} to use no customer provided key.
 * @return a {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
 */
@Override
public AppendBlobClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
    AppendBlobAsyncClient keyedClient = appendBlobAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey);
    return new AppendBlobClient(keyedClient);
}
/**
 * Creates and opens an output stream to write data to the append blob. If the blob already exists on the service,
 * new data will get appended to the existing blob.
 *
 * @return A {@link BlobOutputStream} object used to write data to the blob.
 * @throws BlobStorageException If a storage service error occurred.
 */
public BlobOutputStream getBlobOutputStream() {
    // Explicit cast documents which overload the null resolves to (no access conditions).
    return getBlobOutputStream((AppendBlobRequestConditions) null);
}
/**
 * Creates and opens an output stream to write data to the append blob, subject to the supplied
 * access conditions.
 *
 * NOTE(review): a second Javadoc block here documented a {@code boolean overwrite} overload that is not
 * present at this point in the class; the stray block was folded into this one so the documentation is
 * attached to the method it actually describes.
 *
 * @param requestConditions An {@link AppendBlobRequestConditions} object that represents the access
 * conditions for the blob; pass {@code null} for no conditions.
 * @return A {@link BlobOutputStream} object used to write data to the blob.
 * @throws BlobStorageException If a storage service error occurred.
 */
public BlobOutputStream getBlobOutputStream(AppendBlobRequestConditions requestConditions) {
    return BlobOutputStream.appendBlobOutputStream(appendBlobAsyncClient, requestConditions);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method
 * will not overwrite an existing blob.
 *
 * @return The information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create() {
    final boolean overwrite = false; // default: never clobber an existing blob
    return create(overwrite);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 *
 * @param overwrite Whether or not to overwrite, should data exist on the blob.
 * @return The information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create(boolean overwrite) {
    // When not overwriting, an ETag wildcard If-None-Match makes the create fail if the blob exists.
    BlobRequestConditions conditions = overwrite
        ? new BlobRequestConditions()
        : new BlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    return createWithResponse(null, null, conditions, null, Context.NONE).getValue();
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 * <p>
 * To avoid overwriting, pass the ETag wildcard {@code "*"} as an If-None-Match condition in
 * {@code requestConditions}.
 *
 * @param headers {@link BlobHttpHeaders}
 * @param metadata Metadata to associate with the blob. If there is leading or trailing whitespace in any
 * metadata key or value, it must be removed or encoded.
 * @param requestConditions {@link BlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the created append blob's information.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions, Duration timeout, Context context) {
    // Bundle the individual arguments into the options bag and reuse the options-based overload.
    AppendBlobCreateOptions options = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(options, timeout, context);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 * <p>
 * To avoid overwriting, pass the ETag wildcard {@code "*"} as an If-None-Match request condition.
 *
 * @param options {@link AppendBlobCreateOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the created append blob's information.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    Mono<Response<AppendBlobItem>> response = appendBlobAsyncClient.createWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
 * Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
 *
 * @return {@link AppendBlobItem} containing information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem createIfNotExists() {
    AppendBlobCreateOptions defaultOptions = new AppendBlobCreateOptions();
    return createIfNotExistsWithResponse(defaultOptions, null, null).getValue();
}
/**
 * Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
 *
 * @param options {@link AppendBlobCreateOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} signaling completion whose value contains the {@link AppendBlobItem}. A 201
 * status code means a new append blob was created; 409 means an append blob already existed at this location.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createIfNotExistsWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    Mono<Response<AppendBlobItem>> response =
        appendBlobAsyncClient.createIfNotExistsWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
 * Commits a new block of data to the end of the existing append blob.
 * <p>
 * Note that the data passed must be replayable if retries are enabled (the default): the stream must be
 * markable so it can be re-read on retry. Alternatively, consider wrapping your data source in a
 * {@link java.io.BufferedInputStream} to add mark support.
 *
 * @param data The data to write to the blob. The data must be markable in order to support retries.
 * @param length The exact length of the data. It is important that this value match precisely the length of
 * the data provided in the {@code InputStream}.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlock(InputStream data, long length) {
    // Delegate with no MD5, no request conditions, no timeout, and an empty context.
    return appendBlockWithResponse(data, length, null, null, null, Context.NONE).getValue();
}
/**
 * Commits a new block of data to the end of the existing append blob.
 * <p>
 * The data must be replayable if retries are enabled (the default), i.e. the stream must be markable.
 * Consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark support.
 *
 * @param data The data to write to the blob. The data must be markable in order to support retries.
 * @param length The exact length of the data.
 * @param contentMd5 An MD5 hash of the block content, used by the service to verify transport integrity.
 * This hash is not stored with the blob; a mismatch fails the operation.
 * @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the append blob operation's information.
 * @throws UnexpectedLengthException when the length of data does not match the input {@code length}.
 * @throws NullPointerException if the input data is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockWithResponse(InputStream data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions, Duration timeout, Context context) {
    Objects.requireNonNull(data, "'data' cannot be null.");
    // Convert the stream into a replayable sequence of buffers capped at the max append-block size.
    Flux<ByteBuffer> buffers = Utility.convertStreamToByteBuffer(data, length, MAX_APPEND_BLOCK_BYTES, true);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockWithResponse(buffers, length, contentMd5, appendBlobRequestConditions,
            context),
        timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source
 * blob must either be public or must be authenticated via a shared access signature. If the source blob is
 * public, no authentication is required to perform the operation.
 * @param sourceRange The source {@link BlobRange} to copy.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    // Delegate with no source MD5, no destination/source conditions, no timeout, empty context.
    return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null, null, Context.NONE).getValue();
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source
 * blob must either be public or must be authenticated via a shared access signature. If the source blob is
 * public, no authentication is required to perform the operation.
 * @param sourceRange {@link BlobRange}
 * @param sourceContentMd5 An MD5 hash of the block content from the source blob. If specified, the service
 * will calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
 * @param destRequestConditions {@link AppendBlobRequestConditions}
 * @param sourceRequestConditions {@link BlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMd5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions, Duration timeout, Context context) {
    // Collect the individual arguments into the options bag before invoking the async client.
    AppendBlobAppendBlockFromUrlOptions options = new AppendBlobAppendBlockFromUrlOptions(sourceUrl)
        .setSourceRange(sourceRange)
        .setSourceContentMd5(sourceContentMd5)
        .setDestinationRequestConditions(destRequestConditions)
        .setSourceRequestConditions(sourceRequestConditions);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context), timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * @param options options for the operation
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(AppendBlobAppendBlockFromUrlOptions options,
    Duration timeout, Context context) {
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context), timeout);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void seal() {
    AppendBlobSealOptions defaultOptions = new AppendBlobSealOptions();
    sealWithResponse(defaultOptions, null, Context.NONE);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 *
 * @param options {@link AppendBlobSealOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response signalling completion. (This is a synchronous method; the original "reactive response"
 * wording was copied from the async client.)
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> sealWithResponse(AppendBlobSealOptions options, Duration timeout, Context context) {
    // Blocks on the async client's reactive pipeline, honoring the optional timeout.
    Mono<Response<Void>> response = appendBlobAsyncClient.sealWithResponse(options, context);
    return blockWithOptionalTimeout(response, timeout);
}
} |
It looks like in [.NET](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/storage/Azure.Storage.Blobs/src/AppendBlobClient.cs#L1984), when overwrite is false and the blob already exists, the client continues to append rather than failing. We should follow the same logic here. | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
AppendBlobRequestConditions requestConditions = null;
if (!overwrite) {
if (exists()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(Constants.BLOB_ALREADY_EXISTS));
}
requestConditions = new AppendBlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
} else {
create(true);
}
return getBlobOutputStream(requestConditions);
} | if (exists()) { | public BlobOutputStream getBlobOutputStream(boolean overwrite) {
AppendBlobRequestConditions requestConditions = null;
if (!overwrite) {
requestConditions = new AppendBlobRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
} else {
create(true);
}
return getBlobOutputStream(requestConditions);
} | class AppendBlobClient extends BlobClientBase {
// Logger must be tagged with this (sync) client's class, not the async client's, so log lines
// attribute errors to the type that actually emitted them.
private static final ClientLogger LOGGER = new ClientLogger(AppendBlobClient.class);
private final AppendBlobAsyncClient appendBlobAsyncClient;
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = AppendBlobAsyncClient.MAX_APPEND_BLOCK_BYTES;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = AppendBlobAsyncClient.MAX_BLOCKS;
/**
 * Package-private constructor for use by {@link BlobClientBuilder}.
 * Wraps the given async client; all sync operations block on its reactive pipelines.
 *
 * @param appendBlobAsyncClient the async append blob client
 */
AppendBlobClient(AppendBlobAsyncClient appendBlobAsyncClient) {
    super(appendBlobAsyncClient);
    this.appendBlobAsyncClient = appendBlobAsyncClient;
}
/**
* Creates a new {@link AppendBlobClient} with the specified {@code encryptionScope}.
*
* @param encryptionScope the encryption scope for the blob, pass {@code null} to use no encryption scope.
* @return a {@link AppendBlobClient} with the specified {@code encryptionScope}.
*/
@Override
public AppendBlobClient getEncryptionScopeClient(String encryptionScope) {
    // Wrap a new async client scoped to the requested encryption scope.
    AppendBlobAsyncClient scoped = appendBlobAsyncClient.getEncryptionScopeAsyncClient(encryptionScope);
    return new AppendBlobClient(scoped);
}
/**
* Creates a new {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
*
* @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
* pass {@code null} to use no customer provided key.
* @return a {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
*/
@Override
public AppendBlobClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
    // Wrap a new async client bound to the requested customer-provided key.
    AppendBlobAsyncClient keyed = appendBlobAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey);
    return new AppendBlobClient(keyed);
}
/**
* Creates and opens an output stream to write data to the append blob. If the blob already exists on the service,
* new data will get appended to the existing blob.
*
* @return A {@link BlobOutputStream} object used to write data to the blob.
* @throws BlobStorageException If a storage service error occurred.
*/
public BlobOutputStream getBlobOutputStream() {
    // Explicit cast documents which overload the null resolves to (no access conditions).
    return getBlobOutputStream((AppendBlobRequestConditions) null);
}
/**
 * Creates and opens an output stream to write data to the append blob, subject to the supplied
 * access conditions.
 *
 * NOTE(review): the preceding Javadoc described a {@code boolean overwrite} overload that is not present
 * here, and this method's original Javadoc wrongly claimed "If the blob already exists on the service, it
 * will be overwritten" — this method only opens an append stream; it does not recreate the blob.
 *
 * @param requestConditions An {@link AppendBlobRequestConditions} object that represents the access
 * conditions for the blob; pass {@code null} for no conditions.
 * @return A {@link BlobOutputStream} object used to write data to the blob.
 * @throws BlobStorageException If a storage service error occurred.
 */
public BlobOutputStream getBlobOutputStream(AppendBlobRequestConditions requestConditions) {
    return BlobOutputStream.appendBlobOutputStream(appendBlobAsyncClient, requestConditions);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create -->
* <pre>
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create -->
*
* @return The information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create() {
    final boolean overwrite = false; // default: never clobber an existing blob
    return create(overwrite);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create
* <pre>
* boolean overwrite = false; &
* System.out.printf&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return The information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create(boolean overwrite) {
    // Without overwrite, an ETag wildcard If-None-Match makes the create fail if the blob already exists.
    BlobRequestConditions conditions = new BlobRequestConditions();
    if (!overwrite) {
        conditions = conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return createWithResponse(null, null, conditions, null, Context.NONE).getValue();
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* BlobRequestConditions requestConditions = new BlobRequestConditions&
* .setLeaseId&
* .setIfUnmodifiedSince&
* Context context = new Context&
*
* System.out.printf&
* client.createWithResponse&
* .getLastModified&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob. If there is leading or trailing whitespace in any
* metadata key or value, it must be removed or encoded.
* @param requestConditions {@link BlobRequestConditions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Response} whose {@link Response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions, Duration timeout, Context context) {
    // Bundle the individual arguments into the options bag and reuse the options-based overload.
    AppendBlobCreateOptions options = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(options, timeout, context);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* Map<String, String> tags = Collections.singletonMap&
* BlobRequestConditions requestConditions = new BlobRequestConditions&
* .setIfUnmodifiedSince&
* Context context = new Context&
*
* System.out.printf&
* client.createWithResponse&
* .setTags&
* .getLastModified&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A {@link Response} whose {@link Response
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    Mono<Response<AppendBlobItem>> response = appendBlobAsyncClient.createWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
* <pre>
* client.createIfNotExists&
* System.out.println&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
*
* @return {@link AppendBlobItem} containing information of the created appended blob.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem createIfNotExists() {
    AppendBlobCreateOptions defaultOptions = new AppendBlobCreateOptions();
    return createIfNotExistsWithResponse(defaultOptions, null, null).getValue();
}
/**
* Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse
* <pre>
* BlobHttpHeaders headers = new BlobHttpHeaders&
* .setContentType&
* .setContentLanguage&
* Map<String, String> metadata = Collections.singletonMap&
* Map<String, String> tags = Collections.singletonMap&
* Context context = new Context&
*
* Response<AppendBlobItem> response = client.createIfNotExistsWithResponse&
* .setHeaders&
* if &
* System.out.println&
* &
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional context that is passed through the Http pipeline during the service call.
* @return A reactive response {@link Response} signaling completion, whose {@link Response
* contains the {@link AppendBlobItem} containing information about the append blob. If {@link Response}'s status
* code is 201, a new append blob was successfully created. If status code is 409, an append blob already existed at
* this location.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createIfNotExistsWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    Mono<Response<AppendBlobItem>> response =
        appendBlobAsyncClient.createIfNotExistsWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlock
* <pre>
* System.out.printf&
* client.appendBlock&
* </pre>
* <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlock
*
* @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
* the data is not markable, consider using {@link
* Alternatively, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add mark support.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return The information of the append blob operation.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlock(InputStream data, long length) {
    // No MD5, no request conditions, no timeout; empty pipeline context.
    Response<AppendBlobItem> response = appendBlockWithResponse(data, length, null, null, null, Context.NONE);
    return response.getValue();
}
/**
 * Commits a new block of data to the end of the existing append blob.
 * <p>
 * Note that the data passed must be replayable if retries are enabled (the default). In other words, the
 * stream must produce the same data each time it is read.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse -->
 *
 * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
 * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
 * mark support.
 * @param length The exact length of the data. It is important that this value match precisely the length of the
 * data in the stream.
 * @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
 * transport. When this header is specified, the storage service compares the hash of the content that has arrived
 * with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
 * operation will fail.
 * @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the information of the append blob operation.
 * @throws UnexpectedLengthException when the length of data does not match the input {@code length}.
 * @throws NullPointerException if the input data is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockWithResponse(InputStream data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions, Duration timeout, Context context) {
    Objects.requireNonNull(data, "'data' cannot be null.");
    // Adapt the bounded InputStream into the buffer sequence expected by the async client.
    Flux<ByteBuffer> buffers = Utility.convertStreamToByteBuffer(data, length, MAX_APPEND_BLOCK_BYTES, true);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockWithResponse(buffers, length, contentMd5, appendBlobRequestConditions,
            context),
        timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl -->
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
 * must either be public or must be authenticated via a shared access signature. If the source blob is public, no
 * authentication is required to perform the operation.
 * @param sourceRange The source {@link BlobRange} to copy.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null, null, Context.NONE).getValue();
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
 * must either be public or must be authenticated via a shared access signature. If the source blob is public, no
 * authentication is required to perform the operation.
 * @param sourceRange {@link BlobRange}
 * @param sourceContentMd5 An MD5 hash of the block content from the source blob. If specified, the service will
 * calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
 * @param destRequestConditions {@link AppendBlobRequestConditions}
 * @param sourceRequestConditions {@link BlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMd5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions, Duration timeout, Context context) {
    // Pack the individual parameters into the options bag accepted by the async client.
    AppendBlobAppendBlockFromUrlOptions urlOptions = new AppendBlobAppendBlockFromUrlOptions(sourceUrl)
        .setSourceRange(sourceRange)
        .setSourceContentMd5(sourceContentMd5)
        .setDestinationRequestConditions(destRequestConditions)
        .setSourceRequestConditions(sourceRequestConditions);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(urlOptions, context), timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 *
 * @param options options for the operation
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(AppendBlobAppendBlockFromUrlOptions options, Duration timeout,
    Context context) {
    // Delegate directly to the async client and block with the optional timeout.
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context), timeout);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.seal -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.seal -->
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void seal() {
    // Seal with default options; the Response wrapper is intentionally discarded.
    sealWithResponse(new AppendBlobSealOptions(), null, Context.NONE);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse -->
 *
 * @param options {@link AppendBlobSealOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response signalling completion.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> sealWithResponse(AppendBlobSealOptions options, Duration timeout, Context context) {
    Mono<Response<Void>> response = appendBlobAsyncClient.sealWithResponse(options, context);
    // Qualified call for consistency with every other *WithResponse method in this class.
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
} | class AppendBlobClient extends BlobClientBase {
private final AppendBlobAsyncClient appendBlobAsyncClient; // async delegate that performs all service calls
/**
 * Indicates the maximum number of bytes that can be sent in a call to appendBlock.
 */
public static final int MAX_APPEND_BLOCK_BYTES = AppendBlobAsyncClient.MAX_APPEND_BLOCK_BYTES;
/**
 * Indicates the maximum number of blocks allowed in an append blob.
 */
public static final int MAX_BLOCKS = AppendBlobAsyncClient.MAX_BLOCKS;
/**
 * Package-private constructor for use by {@link BlobClientBuilder}.
 *
 * @param appendBlobAsyncClient the async append blob client to which all operations are delegated
 */
AppendBlobClient(AppendBlobAsyncClient appendBlobAsyncClient) {
    super(appendBlobAsyncClient);
    this.appendBlobAsyncClient = appendBlobAsyncClient;
}
/**
 * Creates a new {@link AppendBlobClient} with the specified {@code encryptionScope}.
 *
 * @param encryptionScope the encryption scope for the blob, pass {@code null} to use no encryption scope.
 * @return a {@link AppendBlobClient} with the specified {@code encryptionScope}.
 */
@Override
public AppendBlobClient getEncryptionScopeClient(String encryptionScope) {
    // Wrap a new async client configured with the scope; this client is left unchanged.
    return new AppendBlobClient(appendBlobAsyncClient.getEncryptionScopeAsyncClient(encryptionScope));
}
/**
 * Creates a new {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
 *
 * @param customerProvidedKey the {@link CustomerProvidedKey} for the blob,
 * pass {@code null} to use no customer provided key.
 * @return a {@link AppendBlobClient} with the specified {@code customerProvidedKey}.
 */
@Override
public AppendBlobClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey) {
    // Wrap a new async client configured with the key; this client is left unchanged.
    return new AppendBlobClient(appendBlobAsyncClient.getCustomerProvidedKeyAsyncClient(customerProvidedKey));
}
/**
 * Creates and opens an output stream to write data to the append blob. If the blob already exists on the service,
 * new data will get appended to the existing blob.
 *
 * @return A {@link BlobOutputStream} object used to write data to the blob.
 * @throws BlobStorageException If a storage service error occurred.
 */
public BlobOutputStream getBlobOutputStream() {
    return getBlobOutputStream(null);
}
/**
 * Creates and opens an output stream to write data to the append blob.
 * <p>
 * NOTE(review): a stale Javadoc block describing an {@code overwrite} overload was removed here; no such
 * overload exists in this class.
 *
 * @param requestConditions A {@link AppendBlobRequestConditions} object that represents the access conditions for
 * the blob.
 * @return A {@link BlobOutputStream} object used to write data to the blob.
 * @throws BlobStorageException If a storage service error occurred.
 */
public BlobOutputStream getBlobOutputStream(AppendBlobRequestConditions requestConditions) {
    return BlobOutputStream.appendBlobOutputStream(appendBlobAsyncClient, requestConditions);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
 * not overwrite an existing blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create -->
 *
 * @return The information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create() {
    return create(false);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.create -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.create -->
 *
 * @param overwrite Whether or not to overwrite, should data exist on the blob.
 *
 * @return The information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem create(boolean overwrite) {
    BlobRequestConditions conditions = new BlobRequestConditions();
    if (!overwrite) {
        // If-None-Match: * makes the service reject the create when a blob already exists at this location.
        conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
    }
    return createWithResponse(null, null, conditions, null, Context.NONE).getValue();
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 * <p>
 * To avoid overwriting, pass "*" to {@link BlobRequestConditions#setIfNoneMatch(String)}.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse -->
 *
 * @param headers {@link BlobHttpHeaders}
 * @param metadata Metadata to associate with the blob. If there is leading or trailing whitespace in any
 * metadata key or value, it must be removed or encoded.
 * @param requestConditions {@link BlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the information of the created append blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions, Duration timeout, Context context) {
    // Pack the individual parameters into the options bag and reuse the options-based overload.
    AppendBlobCreateOptions createOptions = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(createOptions, timeout, context);
}
/**
 * Creates a 0-length append blob. Call appendBlock to append data to an append blob.
 * <p>
 * To avoid overwriting, pass "*" to {@link BlobRequestConditions#setIfNoneMatch(String)}.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createWithResponse -->
 *
 * @param options {@link AppendBlobCreateOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the information of the created append blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    // Delegate to the async client, then block with the caller-supplied optional timeout.
    Mono<Response<AppendBlobItem>> asyncResponse = appendBlobAsyncClient.createWithResponse(options, context);
    return StorageImplUtils.blockWithOptionalTimeout(asyncResponse, timeout);
}
/**
 * Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExists -->
 *
 * @return {@link AppendBlobItem} containing information of the created appended blob.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem createIfNotExists() {
    return createIfNotExistsWithResponse(new AppendBlobCreateOptions(), null, null).getValue();
}
/**
 * Creates a 0-length append blob if it does not exist. Call appendBlock to append data to an append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.createIfNotExistsWithResponse -->
 *
 * @param options {@link AppendBlobCreateOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} signaling completion, whose value contains the {@link AppendBlobItem} containing
 * information about the append blob. If {@link Response}'s status code is 201, a new append blob was successfully
 * created. If status code is 409, an append blob already existed at this location.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> createIfNotExistsWithResponse(AppendBlobCreateOptions options, Duration timeout,
    Context context) {
    return StorageImplUtils.blockWithOptionalTimeout(appendBlobAsyncClient.
        createIfNotExistsWithResponse(options, context), timeout);
}
/**
 * Commits a new block of data to the end of the existing append blob.
 * <p>
 * Note that the data passed must be replayable if retries are enabled (the default). In other words, the
 * stream must produce the same data each time it is read.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlock -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlock -->
 *
 * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
 * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
 * mark support.
 * @param length The exact length of the data. It is important that this value match precisely the length of the
 * data in the stream.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlock(InputStream data, long length) {
    return appendBlockWithResponse(data, length, null, null, null, Context.NONE).getValue();
}
/**
 * Commits a new block of data to the end of the existing append blob.
 * <p>
 * Note that the data passed must be replayable if retries are enabled (the default). In other words, the
 * stream must produce the same data each time it is read.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockWithResponse -->
 *
 * @param data The data to write to the blob. The data must be markable. This is in order to support retries. If
 * the data is not markable, consider wrapping your data source in a {@link java.io.BufferedInputStream} to add
 * mark support.
 * @param length The exact length of the data. It is important that this value match precisely the length of the
 * data in the stream.
 * @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
 * transport. When this header is specified, the storage service compares the hash of the content that has arrived
 * with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
 * operation will fail.
 * @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} whose value contains the information of the append blob operation.
 * @throws UnexpectedLengthException when the length of data does not match the input {@code length}.
 * @throws NullPointerException if the input data is null.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockWithResponse(InputStream data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions, Duration timeout, Context context) {
    Objects.requireNonNull(data, "'data' cannot be null.");
    // Adapt the bounded InputStream into the buffer sequence expected by the async client.
    Flux<ByteBuffer> buffers = Utility.convertStreamToByteBuffer(data, length, MAX_APPEND_BLOCK_BYTES, true);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockWithResponse(buffers, length, contentMd5, appendBlobRequestConditions,
            context),
        timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrl -->
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
 * must either be public or must be authenticated via a shared access signature. If the source blob is public, no
 * authentication is required to perform the operation.
 * @param sourceRange The source {@link BlobRange} to copy.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AppendBlobItem appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null, null, Context.NONE).getValue();
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 *
 * @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
 * account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
 * must either be public or must be authenticated via a shared access signature. If the source blob is public, no
 * authentication is required to perform the operation.
 * @param sourceRange {@link BlobRange}
 * @param sourceContentMd5 An MD5 hash of the block content from the source blob. If specified, the service will
 * calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
 * @param destRequestConditions {@link AppendBlobRequestConditions}
 * @param sourceRequestConditions {@link BlobRequestConditions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMd5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions, Duration timeout, Context context) {
    // Pack the individual parameters into the options bag accepted by the async client.
    AppendBlobAppendBlockFromUrlOptions urlOptions = new AppendBlobAppendBlockFromUrlOptions(sourceUrl)
        .setSourceRange(sourceRange)
        .setSourceContentMd5(sourceContentMd5)
        .setDestinationRequestConditions(destRequestConditions)
        .setSourceRequestConditions(sourceRequestConditions);
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(urlOptions, context), timeout);
}
/**
 * Commits a new block of data from another blob to the end of this append blob.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.appendBlockFromUrlWithResponse -->
 *
 * @param options options for the operation
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return The information of the append blob operation.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AppendBlobItem> appendBlockFromUrlWithResponse(AppendBlobAppendBlockFromUrlOptions options, Duration timeout,
    Context context) {
    // Delegate directly to the async client and block with the optional timeout.
    return StorageImplUtils.blockWithOptionalTimeout(
        appendBlobAsyncClient.appendBlockFromUrlWithResponse(options, context), timeout);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.seal -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.seal -->
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void seal() {
    // Seal with default options; the Response wrapper is intentionally discarded.
    sealWithResponse(new AppendBlobSealOptions(), null, Context.NONE);
}
/**
 * Seals an append blob, making it read only. Any subsequent appends will fail.
 *
 * <p><strong>Code Samples</strong></p>
 *
 * <!-- src_embed com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse -->
 * <!-- end com.azure.storage.blob.specialized.AppendBlobClient.sealWithResponse -->
 *
 * @param options {@link AppendBlobSealOptions}
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A response signalling completion.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> sealWithResponse(AppendBlobSealOptions options, Duration timeout, Context context) {
    Mono<Response<Void>> response = appendBlobAsyncClient.sealWithResponse(options, context);
    // Qualified call for consistency with every other *WithResponse method in this class.
    return StorageImplUtils.blockWithOptionalTimeout(response, timeout);
}
} |
@srnagar Any recommendation on how we'd best serialize/de-serialize these complex data in samples (I know we'd not want to show Jackson, but do we have other alternatives)? Jonathan once recommended text blocks, but that's in Java 15. | public void createWorkflow(){
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | "{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}"); | public void createWorkflow() {
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests(){
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask(){
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests() {
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask() {
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} |
As sample, recommend to use named variables, so that your sample would be similar to what customer uses the SDK. E.g. ```java String workflowId = ""; Response<BinaryData> response = purviewWorkflowClient.createOrReplaceWorkflowWithResponse(workflowId, ...); ``` | public void createWorkflow(){
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | "4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions); | public void createWorkflow() {
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests(){
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask(){
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests() {
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask() {
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} |
Fixed, thanks | public void createWorkflow(){
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | "4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions); | public void createWorkflow() {
BinaryData workflowCreateOrUpdateCommand =
BinaryData.fromString(
"{\"name\":\"Create glossary term workflow\",\"description\":\"\",\"triggers\":[{\"type\":\"when_term_creation_is_requested\",\"underGlossaryHierarchy\":\"/glossaries/20031e20-b4df-4a66-a61d-1b0716f3fa48\"}],\"isEnabled\":true,\"actionDag\":{\"actions\":{\"Startandwaitforanapproval\":{\"type\":\"Approval\",\"inputs\":{\"parameters\":{\"approvalType\":\"PendingOnAll\",\"title\":\"ApprovalRequestforCreateGlossaryTerm\",\"assignedTo\":[\"eece94d9-0619-4669-bb8a-d6ecec5220bc\"]}},\"runAfter\":{}},\"Condition\":{\"type\":\"If\",\"expression\":{\"and\":[{\"equals\":[\"@outputs('Startandwaitforanapproval')['body/outcome']\",\"Approved\"]}]},\"actions\":{\"Createglossaryterm\":{\"type\":\"CreateTerm\",\"runAfter\":{}},\"Sendemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-APPROVED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isapproved.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{\"Createglossaryterm\":[\"Succeeded\"]}}},\"else\":{\"actions\":{\"Sendrejectemailnotification\":{\"type\":\"EmailNotification\",\"inputs\":{\"parameters\":{\"emailSubject\":\"GlossaryTermCreate-REJECTED\",\"emailMessage\":\"YourrequestforGlossaryTerm@{triggerBody()['request']['term']['name']}isrejected.\",\"emailRecipients\":[\"@{triggerBody()['request']['requestor']}\"]}},\"runAfter\":{}}}},\"runAfter\":{\"Startandwaitforanapproval\":[\"Succeeded\"]}}}}}");
RequestOptions requestOptions = new RequestOptions();
Response<BinaryData> response =
purviewWorkflowClient.createOrReplaceWorkflowWithResponse(
"4afb5752-e47f-43a1-8ba7-c696bf8d2745", workflowCreateOrUpdateCommand, requestOptions);
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests(){
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask(){
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} | class ReadmeSamples {
PurviewWorkflowClient purviewWorkflowClient = new PurviewWorkflowClientBuilder()
.endpoint(Configuration.getGlobalConfiguration().get("ENDPOINT", "endpoint"))
.credential(new UsernamePasswordCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("CLIENTID", "clientId"))
.tenantId(Configuration.getGlobalConfiguration().get("TENANTID", "tenantId"))
.username(Configuration.getGlobalConfiguration().get("USERNAME", "username"))
.password(Configuration.getGlobalConfiguration().get("PASSWORD", "password"))
.build())
.buildClient();
    public void submitUserRequests() {
        // Payload: a single CreateTerm operation targeting the glossary identified by its GUID.
        BinaryData userRequestsPayload =
            BinaryData.fromString(
                "{\"comment\":\"Thanks!\",\"operations\":[{\"type\":\"CreateTerm\",\"payload\":{\"glossaryTerm\":{\"name\":\"term\",\"anchor\":{\"glossaryGuid\":\"20031e20-b4df-4a66-a61d-1b0716f3fa48\"},\"nickName\":\"term\",\"status\":\"Approved\"}}}]}");
        RequestOptions requestOptions = new RequestOptions();
        // Submit the request; the raw JSON result is returned as BinaryData.
        Response<BinaryData> response =
            purviewWorkflowClient.submitUserRequestsWithResponse(userRequestsPayload, requestOptions);
    }
    public void approveWorkflowTask() {
        // Comment attached to the approval decision.
        BinaryData approvalResponseComment = BinaryData.fromString("{\"comment\":\"Thanks for raising this!\"}");
        RequestOptions requestOptions = new RequestOptions();
        // Approve the pending task identified by its task id (GUID); no body is returned.
        Response<Void> response =
            purviewWorkflowClient.approveApprovalTaskWithResponse(
                "69b57a00-f5de-4a17-a44a-6479adae373d", approvalResponseComment, requestOptions);
    }
} |
We also need to enable sync rest proxy when sending sync request ```java private Context enableSyncRestProxy(Context context) { return context.addData(HTTP_REST_PROXY_SYNC_PROXY_ENABLE, true); } ``` | public Response<TableItem> createTableWithResponse(Duration timeout, Context context) {
Context contextValue = setContext(context);
final TableProperties properties = new TableProperties().setTableName(tableName);
final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
long timeoutInMillis = setTimeout(timeout);
Callable<Response<TableItem>> createTableOp = () ->
new SimpleResponse<>(tablesImplementation.getTables().createWithResponse(properties,
null,
ResponseFormat.RETURN_NO_CONTENT, null, contextValue),
ModelHelper.createItem(new TableResponseProperties().setTableName(tableName)));
ScheduledFuture<Response<TableItem>> scheduledFuture =
scheduler.schedule(createTableOp, IMMEDIATELY, TimeUnit.SECONDS);
try {
Response<TableItem> response = scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
scheduler.shutdown();
return response;
} catch (Exception ex) {
scheduler.shutdown();
throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
}
} | Context contextValue = setContext(context); | public Response<TableItem> createTableWithResponse(Duration timeout, Context context) {
Context contextValue = TableUtils.setContext(context, true);
final TableProperties properties = new TableProperties().setTableName(tableName);
OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
Callable<Response<TableItem>> callable = () ->
new SimpleResponse<>(tablesImplementation.getTables().createWithResponse(properties,
null,
ResponseFormat.RETURN_NO_CONTENT, null, contextValue),
ModelHelper.createItem(new TableResponseProperties().setTableName(tableName)));
try {
Response<TableItem> response = timeoutInMillis.isPresent()
? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
: callable.call();
return response;
} catch (Exception ex) {
throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
}
} | class TableClient {
private static final String DELIMITER_CONTINUATION_TOKEN = ";";
private final ClientLogger logger = new ClientLogger(TableClient.class);
private final String tableName;
private final AzureTableImpl tablesImplementation;
private final TransactionalBatchImpl transactionalBatchImplementation;
private final String accountName;
private final String tableEndpoint;
private final HttpPipeline pipeline;
private final TableClient transactionalBatchClient;
private static final int IMMEDIATELY = -1;
    /**
     * Creates a {@link TableClient} bound to a single table.
     *
     * @param tableName Name of the table; must be non-null and non-empty.
     * @param pipeline {@link HttpPipeline} used by the generated implementation client.
     * @param serviceUrl URL of the Tables service endpoint.
     * @param serviceVersion Service REST API version to send on requests.
     * @param tablesSerializer Serializer for regular table operations.
     * @param transactionalBatchSerializer Serializer for transactional batch operations.
     */
    TableClient(String tableName, HttpPipeline pipeline, String serviceUrl, TableServiceVersion serviceVersion,
        SerializerAdapter tablesSerializer, SerializerAdapter transactionalBatchSerializer) {
        try {
            if (tableName == null) {
                throw new NullPointerException(("'tableName' must not be null to create TableClient."));
            }
            if (tableName.isEmpty()) {
                throw new IllegalArgumentException("'tableName' must not be empty to create a TableClient.");
            }
            // Account name is the first label of the endpoint host; the table endpoint is the
            // service URL with the table name as the path.
            final URI uri = URI.create(serviceUrl);
            this.accountName = uri.getHost().split("\\.", 2)[0];
            this.tableEndpoint = uri.resolve("/" + tableName).toString();
            logger.verbose("Table Service URI: {}", uri);
        } catch (NullPointerException | IllegalArgumentException ex) {
            // Log validation failures before propagating them to the caller.
            throw logger.logExceptionAsError(ex);
        }
        this.tablesImplementation = new AzureTableImplBuilder()
            .url(serviceUrl)
            .serializerAdapter(tablesSerializer)
            .pipeline(pipeline)
            .version(serviceVersion.getVersion())
            .buildClient();
        this.transactionalBatchImplementation =
            new TransactionalBatchImpl(tablesImplementation, transactionalBatchSerializer);
        this.tableName = tableName;
        this.pipeline = tablesImplementation.getHttpPipeline();
        // Secondary client used internally for transactional batch operations.
        this.transactionalBatchClient = new TableClient(this, serviceVersion, tablesSerializer);
    }
    /**
     * Copy-style constructor that mirrors an existing client but with a no-op ("null") pipeline.
     * Used to build the internal transactional batch client, which must not issue requests itself
     * — hence the null transactional-batch fields.
     *
     * @param client Client to copy connection details from.
     * @param serviceVersion Service REST API version to send on requests.
     * @param tablesSerializer Serializer for table operations.
     */
    TableClient(TableClient client, ServiceVersion serviceVersion, SerializerAdapter tablesSerializer) {
        this.accountName = client.getAccountName();
        this.tableEndpoint = client.getTableEndpoint();
        this.pipeline = BuilderHelper.buildNullClientPipeline();
        this.tablesImplementation = new AzureTableImplBuilder()
            .url(client.getTablesImplementation().getUrl())
            .serializerAdapter(tablesSerializer)
            .pipeline(this.pipeline)
            .version(serviceVersion.getVersion())
            .buildClient();
        this.tableName = client.getTableName();
        this.transactionalBatchImplementation = null;
        this.transactionalBatchClient = null;
    }
/**
* Gets the name of the table.
*
* @return The name of the table.
*/
public String getTableName() {
return tableName;
}
/**
* Gets the name of the account containing the table.
*
* @return The name of the account containing the table.
*/
public String getAccountName() {
return accountName;
}
/**
* Gets the endpoint for this table.
*
* @return The endpoint for this table.
*/
public String getTableEndpoint() {
return tableEndpoint;
}
    /**
     * Gets the {@link HttpPipeline} powering this client.
     *
     * @return This client's {@link HttpPipeline}.
     */
    HttpPipeline getHttpPipeline() {
        return this.pipeline;
    }
/**
* Gets the {@link AzureTableImpl} powering this client.
*
* @return This client's {@link AzureTableImpl}.
*/
AzureTableImpl getTablesImplementation() {
return tablesImplementation;
}
/**
* Gets the REST API version used by this client.
*
* @return The REST API version used by this client.
*/
public TableServiceVersion getServiceVersion() {
return TableServiceVersion.fromString(tablesImplementation.getVersion());
}
/**
* Generates a service SAS for the table using the specified {@link TableSasSignatureValues}.
*
* <p><strong>Note:</strong> The client must be authenticated via {@link AzureNamedKeyCredential}.</p>
* <p>See {@link TableSasSignatureValues} for more information on how to construct a service SAS.</p>
*
* @param tableSasSignatureValues {@link TableSasSignatureValues}.
*
* @return A {@code String} representing the SAS query parameters.
*
* @throws IllegalStateException If this {@link TableClient} is not authenticated with an
* {@link AzureNamedKeyCredential}.
*/
public String generateSas(TableSasSignatureValues tableSasSignatureValues) {
AzureNamedKeyCredential azureNamedKeyCredential = TableSasUtils.extractNamedKeyCredential(getHttpPipeline());
if (azureNamedKeyCredential == null) {
throw logger.logExceptionAsError(new IllegalStateException("Cannot generate a SAS token with a client that"
+ " is not authenticated with an AzureNamedKeyCredential."));
}
return new TableSasGenerator(tableSasSignatureValues, getTableName(), azureNamedKeyCredential).getSas();
}
private Context setContext(Context context) {
return context == null ? Context.NONE : context;
}
private Long setTimeout(Duration timeout) {
return timeout != null ? timeout.toMillis() : Duration.ofDays(1).toMillis();
}
/**
* Creates the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a table. Prints out the details of the created table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.createTable -->
* <pre>
* TableItem tableItem = tableClient.createTable&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.createTable -->
*
* @return A {@link TableItem} that represents the table.
*
* @throws TableServiceException If a table with the same name already exists within the service.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public TableItem createTable() {
        // Delegate to the WithResponse overload with no timeout or context and unwrap the value.
        return createTableWithResponse(null, null).getValue();
    }
/**
* Creates the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a table. Prints out the details of the {@link Response HTTP response} and the created table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.createTableWithResponse
* <pre>
* Response<TableItem> response = tableClient.createTableWithResponse&
* new Context&
*
* System.out.printf&
* response.getStatusCode&
* </pre>
* <!-- end com.azure.data.tables.tableClient.createTableWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response} containing a {@link TableItem} that represents the table.
*
* @throws TableServiceException If a table with the same name already exists within the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Deletes the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteTable -->
* <pre>
* tableClient.deleteTable&
*
* System.out.print&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteTable -->
*
* @throws TableServiceException If the request is rejected by the service.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void deleteTable() {
        // Delegate to the WithResponse overload with no timeout or context, discarding the response.
        deleteTableWithResponse(null, null);
    }
/**
* Deletes the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a table. Prints out the details of the {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteTableWithResponse
* <pre>
* Response<Void> response = tableClient.deleteTableWithResponse&
* new Context&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteTableWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteTableWithResponse(Duration timeout, Context context) {
    final Context resolvedContext = setContext(context);
    final long timeoutMillis = setTimeout(timeout);
    final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
    // Issue the delete and surface only status/headers; the response body is discarded.
    Callable<Response<Void>> operation = () -> new SimpleResponse<>(
        tablesImplementation.getTables().deleteWithResponse(tableName, null, resolvedContext), null);
    ScheduledFuture<Response<Void>> future = executor.schedule(operation, IMMEDIATELY, TimeUnit.SECONDS);
    executor.shutdown();
    try {
        return future.get(timeoutMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        // A 404 from the service means the table is already gone; swallow404Exception
        // converts that case into a successful empty response and rethrows anything else.
        return swallow404Exception(mapThrowableToTableServiceException(ex));
    }
}
/**
 * Converts a 404 {@link TableServiceException} into a successful, empty {@link Response} so that
 * idempotent delete operations succeed when the target is already absent. Any other throwable is
 * mapped, logged and rethrown as a {@link RuntimeException}.
 */
private Response<Void> swallow404Exception(Throwable ex) {
    if (ex instanceof TableServiceException) {
        // Cast once and reuse the underlying HTTP response instead of re-casting per accessor.
        TableServiceException serviceException = (TableServiceException) ex;
        if (serviceException.getResponse().getStatusCode() == 404) {
            return new SimpleResponse<>(
                serviceException.getResponse().getRequest(),
                serviceException.getResponse().getStatusCode(),
                serviceException.getResponse().getHeaders(),
                null);
        }
    }
    throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
}
/**
* Inserts an {@link TableEntity entity} into the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Inserts an {@link TableEntity entity} into the table. Prints out the details of the created
* {@link TableEntity entity}.</p>
 * <!-- src_embed com.azure.data.tables.tableClient.createEntity -->
* <pre>
* String partitionKey = "partitionKey";
* String rowKey = "rowKey";
*
* TableEntity tableEntity = new TableEntity&
* .addProperty&
*
* tableClient.createEntity&
*
* System.out.printf&
* </pre>
 * <!-- end com.azure.data.tables.tableClient.createEntity -->
*
* @param entity The {@link TableEntity entity} to insert.
*
* @throws TableServiceException If an {@link TableEntity entity} with the same partition key and row key already
* exists within the table.
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void createEntity(TableEntity entity) {
    // Convenience overload: insert with no client-side timeout and no custom pipeline context.
    createEntityWithResponse(entity, null, null);
}
/**
* Inserts an {@link TableEntity entity} into the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Inserts an {@link TableEntity entity} into the table. Prints out the details of the
* {@link Response HTTP response} and the created {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.createEntityWithResponse
* <pre>
* String myPartitionKey = "partitionKey";
* String myRowKey = "rowKey";
*
* TableEntity myTableEntity = new TableEntity&
* .addProperty&
*
* Response<Void> response = tableClient.createEntityWithResponse&
* new Context&
*
* System.out.printf&
* + " '%s' was created.", response.getStatusCode&
* </pre>
* <!-- end com.azure.data.tables.tableClient.createEntityWithResponse
*
* @param entity The {@link TableEntity entity} to insert.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If an {@link TableEntity entity} with the same partition key and row key already
* exists within the table.
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> createEntityWithResponse(TableEntity entity, Duration timeout, Context context) {
    // Validate the argument first so an invalid call fails fast without allocating an executor.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = setContext(context);
    long timeoutInMillis = setTimeout(timeout);
    // Sync getter-backed properties into the entity's property map before serialization.
    EntityHelper.setPropertiesFromGetters(entity, logger);
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    Callable<Response<Void>> createEntityOp = () -> {
        Response<Map<String, Object>> response = tablesImplementation.getTables().insertEntityWithResponse(
            tableName, null, null, ResponseFormat.RETURN_NO_CONTENT,
            entity.getProperties(), null, contextValue);
        // Strip the body; callers only get status, headers and request back.
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
    };
    ScheduledFuture<Response<Void>> scheduledFuture =
        scheduler.schedule(createEntityOp, IMMEDIATELY, TimeUnit.SECONDS);
    scheduler.shutdown();
    try {
        return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
    }
}
/**
* Inserts an {@link TableEntity entity} into the table if it does not exist, or merges the
* {@link TableEntity entity} with the existing {@link TableEntity entity} otherwise.
*
* <p><strong>Code Samples</strong></p>
* <p>Upserts an {@link TableEntity entity} into the table. Prints out the details of the upserted
* {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.upsertEntity
* <pre>
* String partitionKey = "partitionKey";
* String rowKey = "rowKey";
*
* TableEntity tableEntity = new TableEntity&
* .addProperty&
*
* tableClient.upsertEntity&
*
* System.out.printf&
* rowKey&
* </pre>
* <!-- end com.azure.data.tables.tableClient.upsertEntity
*
* @param entity The {@link TableEntity entity} to upsert.
*
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void upsertEntity(TableEntity entity) {
    // Convenience overload: null update mode falls through to the MERGE path in
    // upsertEntityWithResponse; no client-side timeout, no custom pipeline context.
    upsertEntityWithResponse(entity, null, null, null);
}
/**
* Inserts an {@link TableEntity entity} into the table if it does not exist, or updates the existing
* {@link TableEntity entity} using the specified {@link TableEntityUpdateMode update mode} otherwise. The default
* {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
*
* <p>When the {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will be merged into the existing {@link TableEntity entity}. When the
* {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will completely replace those in the existing {@link TableEntity
* entity}.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Upserts an {@link TableEntity entity} into the table with the specified
* {@link TableEntityUpdateMode update mode} if said {@link TableEntity entity} already exists. Prints out the
* details of the {@link Response HTTP response} and the upserted {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.upsertEntityWithResponse
* <pre>
* String myPartitionKey = "partitionKey";
* String myRowKey = "rowKey";
*
* TableEntity myTableEntity = new TableEntity&
* .addProperty&
*
* Response<Void> response = tableClient.upsertEntityWithResponse&
* Duration.ofSeconds&
*
* System.out.printf&
* + " '%s' was updated&
* </pre>
* <!-- end com.azure.data.tables.tableClient.upsertEntityWithResponse
*
* @param entity The {@link TableEntity entity} to upsert.
* @param updateMode The type of update to perform if the {@link TableEntity entity} already exits.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> upsertEntityWithResponse(TableEntity entity, TableEntityUpdateMode updateMode,
                                               Duration timeout, Context context) {
    // Validate the argument first so an invalid call fails fast without allocating an executor.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = setContext(context);
    long timeoutInMillis = setTimeout(timeout);
    // Escape single quotes so the keys are valid inside the OData resource path.
    String partitionKey = escapeSingleQuotes(entity.getPartitionKey());
    String rowKey = escapeSingleQuotes(entity.getRowKey());
    // Sync getter-backed properties into the entity's property map before serialization.
    EntityHelper.setPropertiesFromGetters(entity, logger);
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    Callable<Response<Void>> upsertEntityOp = () -> {
        // REPLACE issues an update (full overwrite); any other mode, including null, merges.
        if (updateMode == TableEntityUpdateMode.REPLACE) {
            return tablesImplementation.getTables().updateEntityWithResponse(
                tableName, partitionKey, rowKey, null, null, null,
                entity.getProperties(), null, contextValue);
        } else {
            return tablesImplementation.getTables().mergeEntityWithResponse(
                tableName, partitionKey, rowKey, null, null, null,
                entity.getProperties(), null, contextValue);
        }
    };
    ScheduledFuture<Response<Void>> scheduledFuture =
        scheduler.schedule(upsertEntityOp, IMMEDIATELY, TimeUnit.SECONDS);
    scheduler.shutdown();
    try {
        return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
    }
}
/**
* Updates an existing {@link TableEntity entity} by merging the provided {@link TableEntity entity} with the
* existing {@link TableEntity entity}.
*
* <p><strong>Code Samples</strong></p>
* <p>Updates a {@link TableEntity entity} on the table. Prints out the details of the updated
* {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.updateEntity
* <pre>
* String partitionKey = "partitionKey";
* String rowKey = "rowKey";
*
* TableEntity tableEntity = new TableEntity&
* .addProperty&
*
* tableClient.updateEntity&
*
* System.out.printf&
* rowKey&
* </pre>
* <!-- end com.azure.data.tables.tableClient.updateEntity
*
* @param entity The {@link TableEntity entity} to update.
*
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
* @throws TableServiceException If no {@link TableEntity entity} with the same partition key and row key exists
* within the table.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void updateEntity(TableEntity entity) {
    // Convenience overload: null update mode resolves to a merge further down the chain.
    updateEntity(entity, null);
}
/**
* Updates an existing {@link TableEntity entity} using the specified {@link TableEntityUpdateMode update mode}.
* The default {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
*
* <p>When the {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will be merged into the existing {@link TableEntity entity}. When the
* {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will completely replace those in the existing {@link TableEntity entity}.
* </p>
*
* <p><strong>Code Samples</strong></p>
* <p>Updates a {@link TableEntity entity} on the table with the specified
* {@link TableEntityUpdateMode update mode}. Prints out the details of the updated {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.updateEntity
* <pre>
* String myPartitionKey = "partitionKey";
* String myRowKey = "rowKey";
*
* TableEntity myTableEntity = new TableEntity&
* .addProperty&
*
* tableClient.updateEntity&
*
* System.out.printf&
* rowKey&
* </pre>
* <!-- end com.azure.data.tables.tableClient.updateEntity
*
* @param entity The {@link TableEntity entity} to update.
* @param updateMode The type of update to perform.
*
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
* @throws TableServiceException If no {@link TableEntity entity} with the same partition key and row key exists
* within the table.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void updateEntity(TableEntity entity, TableEntityUpdateMode updateMode) {
    // ifUnchanged=false: unconditional update (wildcard ETag); no timeout, no custom context.
    updateEntityWithResponse(entity, updateMode, false, null, null);
}
/**
* Updates an existing {@link TableEntity entity} using the specified {@link TableEntityUpdateMode update mode}.
* The default {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
*
* <p>When the {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will be merged into the existing {@link TableEntity entity}. When the
* {@link TableEntityUpdateMode update mode} is {@link TableEntityUpdateMode
* {@link TableEntity entity}'s properties will completely replace those in the existing {@link TableEntity entity}.
* </p>
*
* <p><strong>Code Samples</strong></p>
* <p>Updates a {@link TableEntity entity} on the table with the specified {@link TableEntityUpdateMode update
* mode}
* if the {@code ETags} on both {@link TableEntity entities} match. Prints out the details of the
* {@link Response HTTP response} updated {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.updateEntityWithResponse
* <pre>
* String somePartitionKey = "partitionKey";
* String someRowKey = "rowKey";
*
* TableEntity someTableEntity = new TableEntity&
* .addProperty&
*
* Response<Void> response = tableClient.updateEntityWithResponse&
* true, Duration.ofSeconds&
*
* System.out.printf&
* + " '%s' was updated.", response.getStatusCode&
* </pre>
* <!-- end com.azure.data.tables.tableClient.updateEntityWithResponse
*
* @param entity The {@link TableEntity entity} to update.
* @param updateMode The type of update to perform.
* @param ifUnchanged When true, the ETag of the provided {@link TableEntity entity} must match the ETag of the
* {@link TableEntity entity} in the Table service. If the values do not match, the update will not occur and an
* exception will be thrown.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws IllegalArgumentException If the provided {@link TableEntity entity} is {@code null}.
* @throws TableServiceException If no {@link TableEntity entity} with the same partition key and row key exists
* within the table, or if {@code ifUnchanged} is {@code true} and the existing {@link TableEntity entity}'s ETag
* does not match that of the provided {@link TableEntity entity}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> updateEntityWithResponse(TableEntity entity, TableEntityUpdateMode updateMode,
                                               boolean ifUnchanged, Duration timeout, Context context) {
    // Validate the argument first so an invalid call fails fast without allocating an executor.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = setContext(context);
    long timeoutInMillis = setTimeout(timeout);
    // Escape single quotes so the keys are valid inside the OData resource path.
    String partitionKey = escapeSingleQuotes(entity.getPartitionKey());
    String rowKey = escapeSingleQuotes(entity.getRowKey());
    // "*" means an unconditional update; the real ETag enforces optimistic concurrency.
    String eTag = ifUnchanged ? entity.getETag() : "*";
    // Sync getter-backed properties into the entity's property map before serialization.
    EntityHelper.setPropertiesFromGetters(entity, logger);
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    Callable<Response<Void>> updateEntityOp = () -> {
        // REPLACE issues an update (full overwrite); any other mode, including null, merges.
        if (updateMode == TableEntityUpdateMode.REPLACE) {
            return tablesImplementation.getTables()
                .updateEntityWithResponse(tableName, partitionKey, rowKey, null, null, eTag,
                    entity.getProperties(), null, contextValue);
        } else {
            return tablesImplementation.getTables()
                .mergeEntityWithResponse(tableName, partitionKey, rowKey, null, null, eTag,
                    entity.getProperties(), null, contextValue);
        }
    };
    ScheduledFuture<Response<Void>> scheduledFuture =
        scheduler.schedule(updateEntityOp, IMMEDIATELY, TimeUnit.SECONDS);
    scheduler.shutdown();
    try {
        return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
    }
}
/**
* Deletes an {@link TableEntity entity} from the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes an {@link TableEntity entity} on the table. Prints out the entity's {@code partitionKey} and
* {@code rowKey}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteEntity
* <pre>
* String partitionKey = "partitionKey";
* String rowKey = "rowKey";
*
* tableClient.deleteEntity&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteEntity
*
* @param partitionKey The partition key of the {@link TableEntity entity}.
* @param rowKey The row key of the {@link TableEntity entity}.
*
* @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
* empty.
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void deleteEntity(String partitionKey, String rowKey) {
    // Unconditional delete: null ETag + ifUnchanged=false resolves to the "*" wildcard ETag.
    deleteEntityWithResponse(partitionKey, rowKey, null, false, null, null);
}
/**
* Deletes an {@link TableEntity entity} from the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a {@link TableEntity entity} on the table. Prints out the details of the deleted
* {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteEntity
* <pre>
* String myPartitionKey = "partitionKey";
* String myRowKey = "rowKey";
*
* TableEntity myTableEntity = new TableEntity&
* .addProperty&
*
* tableClient.deleteEntity&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteEntity
*
* @param entity The {@link TableEntity entity} to delete.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void deleteEntity(TableEntity entity) {
    // ifUnchanged=false: delete regardless of the entity's current ETag on the service.
    deleteEntityWithResponse(entity, false, null, null);
}
/**
* Deletes an {@link TableEntity entity} from the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a {@link TableEntity entity} on the table. Prints out the details of the
* {@link Response HTTP response} and the deleted {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteEntityWithResponse
* <pre>
* String somePartitionKey = "partitionKey";
* String someRowKey = "rowKey";
*
* TableEntity someTableEntity = new TableEntity&
* .addProperty&
*
* Response<Void> response = tableClient.deleteEntityWithResponse&
* new Context&
*
* System.out.printf&
* + " '%s' was deleted.", response.getStatusCode&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteEntityWithResponse
*
* @param entity The table {@link TableEntity entity} to delete.
* @param ifUnchanged When true, the ETag of the provided {@link TableEntity entity} must match the ETag of the
* {@link TableEntity entity} in the Table service. If the values do not match, the update will not occur and an
* exception will be thrown.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteEntityWithResponse(TableEntity entity, boolean ifUnchanged, Duration timeout,
                                               Context context) {
    // Guard against null so callers get a logged IllegalArgumentException (consistent with the
    // other entity operations) instead of a bare NullPointerException from the dereference below.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    return deleteEntityWithResponse(
        entity.getPartitionKey(), entity.getRowKey(), entity.getETag(), ifUnchanged, timeout, context);
}
/**
 * Deletes a single entity identified by partition and row key.
 *
 * @param eTag ETag to match; ignored (replaced by the "*" wildcard) when {@code ifUnchanged} is false.
 * @return The HTTP response; a service-side 404 is swallowed and surfaced as a successful response.
 */
private Response<Void> deleteEntityWithResponse(String partitionKey, String rowKey, String eTag, boolean ifUnchanged,
                                                Duration timeout, Context context) {
    // Validate the keys first so an invalid call fails fast without allocating an executor.
    if (isNullOrEmpty(partitionKey) || isNullOrEmpty(rowKey)) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'partitionKey' and 'rowKey' cannot be null"));
    }
    Context contextValue = setContext(context);
    long timeoutInMillis = setTimeout(timeout);
    // "*" means an unconditional delete; the supplied ETag enforces optimistic concurrency.
    String finalETag = ifUnchanged ? eTag : "*";
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    Callable<Response<Void>> deleteEntityOp = () -> tablesImplementation.getTables().deleteEntityWithResponse(
        tableName, escapeSingleQuotes(partitionKey), escapeSingleQuotes(rowKey), finalETag, null,
        null, null, contextValue);
    ScheduledFuture<Response<Void>> scheduledFuture = scheduler.schedule(deleteEntityOp, IMMEDIATELY, TimeUnit.SECONDS);
    scheduler.shutdown();
    try {
        return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        // A 404 means the entity is already gone; treat the delete as successful.
        Throwable except = mapThrowableToTableServiceException(ex);
        return swallow404Exception(except);
    }
}
/**
* Lists all {@link TableEntity entities} within the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Lists all {@link TableEntity entities} on the table. Prints out the details of the
* retrieved {@link TableEntity entities}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.listEntities -->
* <pre>
* PagedIterable<TableEntity> tableEntities = tableClient.listEntities&
*
* tableEntities.forEach&
* System.out.printf&
* tableEntity.getPartitionKey&
* </pre>
* <!-- end com.azure.data.tables.tableClient.listEntities -->
*
* @return A {@link PagedIterable} containing all {@link TableEntity entities} within the table.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<TableEntity> listEntities() {
    // Default options (no filter, select or top), no timeout, no custom pipeline context.
    return listEntities(new ListEntitiesOptions(), null, null);
}
/**
* Lists {@link TableEntity entities} using the parameters in the provided options.
*
* <p>If the {@code filter} parameter in the options is set, only {@link TableEntity entities} matching the filter
* will be returned. If the {@code select} parameter is set, only the properties included in the select parameter
* will be returned for each {@link TableEntity entity}. If the {@code top} parameter is set, the maximum number of
* returned {@link TableEntity entities} per page will be limited to that value.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Lists all {@link TableEntity entities} on the table. Prints out the details of the
* {@link Response HTTP response} and all the retrieved {@link TableEntity entities}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.listEntities
* <pre>
* List<String> propertiesToSelect = new ArrayList<>&
* propertiesToSelect.add&
* propertiesToSelect.add&
* propertiesToSelect.add&
*
* ListEntitiesOptions listEntitiesOptions = new ListEntitiesOptions&
* .setTop&
* .setFilter&
* .setSelect&
*
* PagedIterable<TableEntity> myTableEntities = tableClient.listEntities&
* Duration.ofSeconds&
*
* myTableEntities.forEach&
* System.out.printf&
* tableEntity.getPartitionKey&
*
* tableEntity.getProperties&
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.data.tables.tableClient.listEntities
*
* @param options The {@code filter}, {@code select}, and {@code top} OData query options to apply to this
* operation.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return A {@link PagedIterable} containing matching {@link TableEntity entities} within the table.
*
* @throws IllegalArgumentException If one or more of the OData query options in {@code options} is malformed.
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<TableEntity> listEntities(ListEntitiesOptions options, Duration timeout, Context context) {
    final long timeoutMillis = setTimeout(timeout);
    final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
    // Construct the lazily-evaluated iterable; individual pages are fetched as it is iterated.
    Callable<PagedIterable<TableEntity>> operation = () -> new PagedIterable<>(
        () -> listEntitiesFirstPage(context, options, TableEntity.class),
        continuationToken -> listEntitiesNextPage(continuationToken, context, options, TableEntity.class));
    ScheduledFuture<PagedIterable<TableEntity>> future =
        executor.schedule(operation, IMMEDIATELY, TimeUnit.SECONDS);
    executor.shutdown();
    try {
        return future.get(timeoutMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
    }
}
// Fetches the first page of a listing: null continuation markers signal "start from the beginning".
private <T extends TableEntity> PagedResponse<T> listEntitiesFirstPage(Context context,
                                                                       ListEntitiesOptions options,
                                                                       Class<T> resultType) {
    return listEntities(null, null, context, options, resultType);
}
/**
 * Fetches a subsequent page of a listing. The continuation token packs the next partition key and
 * next row key separated by {@code DELIMITER_CONTINUATION_TOKEN}; a null token ends iteration.
 */
private <T extends TableEntity> PagedResponse<T> listEntitiesNextPage(String token, Context context,
                                                                      ListEntitiesOptions options,
                                                                      Class<T> resultType) {
    if (token == null) {
        // No continuation token means there are no further pages.
        return null;
    }
    final String[] keys = token.split(DELIMITER_CONTINUATION_TOKEN, 2);
    if (keys.length != 2) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Split done incorrectly, must have partition and row key: " + token));
    }
    return listEntities(keys[0], keys[1], context, options, resultType);
}
/**
 * Fetches one page of entities and packages it with the continuation markers from the response
 * headers so the pager can request the next page.
 *
 * @param nextPartitionKey Continuation marker for the partition key; null for the first page.
 * @param nextRowKey Continuation marker for the row key; null for the first page.
 * @return One page of deserialized entities, or null when the service returned no usable payload.
 */
private <T extends TableEntity> PagedResponse<T> listEntities(String nextPartitionKey, String nextRowKey,
                                                              Context context, ListEntitiesOptions options,
                                                              Class<T> resultType) {
    Context contextValue = setContext(context);
    String select = null;
    if (options.getSelect() != null) {
        // The service expects the selected property names as a single comma-separated string.
        select = String.join(",", options.getSelect());
    }
    QueryOptions queryOptions = new QueryOptions()
        .setFilter(options.getFilter())
        .setTop(options.getTop())
        .setSelect(select)
        // Full metadata is required so property types can be reconstructed on deserialization.
        .setFormat(OdataMetadataFormat.APPLICATION_JSON_ODATA_FULLMETADATA);
    final ResponseBase<TablesQueryEntitiesHeaders, TableEntityQueryResponse> response =
        tablesImplementation.getTables().queryEntitiesWithResponse(tableName, null, null,
            nextPartitionKey, nextRowKey, queryOptions, contextValue);
    final TableEntityQueryResponse tablesQueryEntityResponse = response.getValue();
    if (tablesQueryEntityResponse == null) {
        return null;
    }
    final List<Map<String, Object>> entityResponseValue = tablesQueryEntityResponse.getValue();
    if (entityResponseValue == null) {
        return null;
    }
    // Map each raw property bag to a TableEntity, then narrow to the requested subclass.
    final List<T> entities = entityResponseValue.stream()
        .map(ModelHelper::createEntity)
        .map(e -> EntityHelper.convertToSubclass(e, resultType, logger))
        .collect(Collectors.toList());
    // The x-ms-continuation-* headers carry the markers for the next page (null when exhausted).
    return new EntityPaged<>(response, entities,
        response.getDeserializedHeaders().getXMsContinuationNextPartitionKey(),
        response.getDeserializedHeaders().getXMsContinuationNextRowKey());
}
/**
 * A single page of entities paired with the HTTP response that produced it. The continuation
 * token joins the next partition and row key with {@code DELIMITER_CONTINUATION_TOKEN}; it is
 * null when either marker is missing, which signals the end of the listing.
 */
private static class EntityPaged<T extends TableEntity> implements PagedResponse<T> {
    private final Response<TableEntityQueryResponse> httpResponse;
    private final IterableStream<T> entityStream;
    private final String continuationToken;

    EntityPaged(Response<TableEntityQueryResponse> httpResponse, List<T> entityList,
                String nextPartitionKey, String nextRowKey) {
        this.httpResponse = httpResponse;
        this.entityStream = IterableStream.of(entityList);
        // Both markers are required to address the next page; otherwise there is no next page.
        this.continuationToken = (nextPartitionKey == null || nextRowKey == null)
            ? null
            : String.join(DELIMITER_CONTINUATION_TOKEN, nextPartitionKey, nextRowKey);
    }

    @Override
    public int getStatusCode() {
        return httpResponse.getStatusCode();
    }

    @Override
    public HttpHeaders getHeaders() {
        return httpResponse.getHeaders();
    }

    @Override
    public HttpRequest getRequest() {
        return httpResponse.getRequest();
    }

    @Override
    public IterableStream<T> getElements() {
        return entityStream;
    }

    @Override
    public String getContinuationToken() {
        return continuationToken;
    }

    @Override
    public void close() {
        // Nothing to release; the page holds no open resources.
    }
}
/**
* Gets a single {@link TableEntity entity} from the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets an {@link TableEntity entity} on the table. Prints out the details of the retrieved
* {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.getEntity
* <pre>
* String partitionKey = "partitionKey";
* String rowKey = "rowKey";
*
* TableEntity tableEntity = tableClient.getEntity&
*
* System.out.printf&
* tableEntity.getRowKey&
* </pre>
* <!-- end com.azure.data.tables.tableClient.getEntity
*
* @param partitionKey The partition key of the {@link TableEntity entity}.
* @param rowKey The partition key of the {@link TableEntity entity}.
*
* @return The {@link TableEntity entity}.
*
* @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
* empty.
* @throws TableServiceException If no {@link TableEntity entity} with the provided {@code partitionKey} and
* {@code rowKey} exists within the table.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public TableEntity getEntity(String partitionKey, String rowKey) {
    // Convenience overload: no property selection, no timeout, no custom pipeline context.
    return getEntityWithResponse(partitionKey, rowKey, null, null, null).getValue();
}
/**
* Gets a single {@link TableEntity entity} from the table.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets an {@link TableEntity entity} on the table. Prints out the details of the {@link Response HTTP response}
* retrieved {@link TableEntity entity}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.getEntityWithResponse
* <pre>
* String myPartitionKey = "partitionKey";
* String myRowKey = "rowKey";
*
* List<String> propertiesToSelect = new ArrayList<>&
* propertiesToSelect.add&
* propertiesToSelect.add&
* propertiesToSelect.add&
*
* Response<TableEntity> response = tableClient.getEntityWithResponse&
* Duration.ofSeconds&
*
* TableEntity myTableEntity = response.getValue&
*
* System.out.printf&
* + " '%s' and properties:", response.getStatusCode&
* myTableEntity.getRowKey&
*
* myTableEntity.getProperties&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.getEntityWithResponse
*
* @param partitionKey The partition key of the {@link TableEntity entity}.
* @param rowKey The partition key of the {@link TableEntity entity}.
* @param select A list of properties to select on the {@link TableEntity entity}.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response} containing the {@link TableEntity entity}.
*
* @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
* empty, or if the {@code select} OData query option is malformed.
* @throws TableServiceException If no {@link TableEntity entity} with the provided {@code partitionKey} and
* {@code rowKey} exists within the table.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableEntity> getEntityWithResponse(String partitionKey, String rowKey, List<String> select,
                                                   Duration timeout, Context context) {
    // Validate the keys first so an invalid call fails fast without allocating an executor.
    if (isNullOrEmpty(partitionKey) || isNullOrEmpty(rowKey)) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'partitionKey' and 'rowKey' cannot be null."));
    }
    long timeoutInMillis = setTimeout(timeout);
    Context contextValue = setContext(context);
    // Full metadata is required so property types can be reconstructed on deserialization.
    QueryOptions queryOptions = new QueryOptions()
        .setFormat(OdataMetadataFormat.APPLICATION_JSON_ODATA_FULLMETADATA);
    if (select != null) {
        // The service expects the selected property names as a single comma-separated string.
        queryOptions.setSelect(String.join(",", select));
    }
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    Callable<Response<TableEntity>> getEntityOp = () -> {
        ResponseBase<TablesQueryEntityWithPartitionAndRowKeyHeaders, Map<String, Object>> response =
            tablesImplementation.getTables().queryEntityWithPartitionAndRowKeyWithResponse(
                tableName, escapeSingleQuotes(partitionKey), escapeSingleQuotes(rowKey), null, null,
                queryOptions, contextValue);
        final Map<String, Object> matchingEntity = response.getValue();
        if (matchingEntity == null || matchingEntity.isEmpty()) {
            // No match: log and surface a null response (existing contract preserved).
            logger.info("There was no matching entity. Table {}, partition key: {}, row key: {}.",
                tableName, partitionKey, rowKey);
            return null;
        }
        final TableEntity entity = ModelHelper.createEntity(matchingEntity);
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
            EntityHelper.convertToSubclass(entity, TableEntity.class, logger));
    };
    ScheduledFuture<Response<TableEntity>> scheduledFuture =
        scheduler.schedule(getEntityOp, IMMEDIATELY, TimeUnit.SECONDS);
    scheduler.shutdown();
    try {
        return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) TableUtils.mapThrowableToTableServiceException(ex));
    }
}
/**
* Retrieves details about any stored {@link TableAccessPolicies access policies} specified on the table that may
* be used with Shared Access Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Gets a table's {@link TableAccessPolicies access policies}. Prints out the details of the retrieved
* {@link TableAccessPolicies access policies}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.getAccessPolicies -->
* <pre>
* TableAccessPolicies accessPolicies = tableClient.getAccessPolicies&
*
* accessPolicies.getIdentifiers&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.getAccessPolicies -->
*
* @return The table's {@link TableAccessPolicies access policies}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public TableAccessPolicies getAccessPolicies() {
// Delegates to the response-returning overload with no timeout or context and unwraps the value.
return getAccessPoliciesWithResponse(null, null).getValue();
}
/**
* Retrieves details about any stored {@link TableAccessPolicies access policies} specified on the table that may be
* used with Shared Access Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Gets a table's {@link TableAccessPolicies access policies}. Prints out the details of the
* {@link Response HTTP response} and the retrieved {@link TableAccessPolicies access policies}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.getAccessPoliciesWithResponse
* <pre>
* List<String> propertiesToSelect = new ArrayList<>&
* propertiesToSelect.add&
* propertiesToSelect.add&
* propertiesToSelect.add&
*
* Response<TableAccessPolicies> response = tableClient.getAccessPoliciesWithResponse&
* new Context&
*
* System.out.printf&
* + " IDs:", response.getStatusCode&
*
* response.getValue&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.getAccessPoliciesWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return An {@link Response HTTP response} containing the table's {@link TableAccessPolicies access policies}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableAccessPolicies> getAccessPoliciesWithResponse(Duration timeout, Context context) {
    // Resolve the effective timeout and context once, then run the call on a single-use scheduler
    // so the optional client-side timeout can be enforced.
    long timeoutInMillis = setTimeout(timeout);
    Context contextValue = setContext(context);
    final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
    Callable<Response<TableAccessPolicies>> fetchPolicies = () -> {
        ResponseBase<TablesGetAccessPolicyHeaders, List<SignedIdentifier>> innerResponse =
            tablesImplementation.getTables().getAccessPolicyWithResponse(tableName, null, null, contextValue);
        // Convert the generated service-model identifiers to the public model; a null body stays null.
        List<SignedIdentifier> identifiers = innerResponse.getValue();
        List<TableSignedIdentifier> converted = null;
        if (identifiers != null) {
            converted = identifiers.stream()
                .map(this::toTableSignedIdentifier)
                .collect(Collectors.toList());
        }
        return new SimpleResponse<>(innerResponse, new TableAccessPolicies(converted));
    };
    ScheduledFuture<Response<TableAccessPolicies>> future =
        executor.schedule(fetchPolicies, IMMEDIATELY, TimeUnit.SECONDS);
    executor.shutdown();
    try {
        return future.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
// Converts a generated service-model SignedIdentifier to the public TableSignedIdentifier model.
// A null input maps straight through to null.
private TableSignedIdentifier toTableSignedIdentifier(SignedIdentifier signedIdentifier) {
    return signedIdentifier == null
        ? null
        : new TableSignedIdentifier(signedIdentifier.getId())
            .setAccessPolicy(toTableAccessPolicy(signedIdentifier.getAccessPolicy()));
}
// Converts a generated service-model AccessPolicy to the public TableAccessPolicy model.
private TableAccessPolicy toTableAccessPolicy(AccessPolicy accessPolicy) {
    if (accessPolicy == null) {
        return null;
    }
    // Copy each field of the service model onto the public model explicitly.
    TableAccessPolicy converted = new TableAccessPolicy();
    converted.setExpiresOn(accessPolicy.getExpiry());
    converted.setStartsOn(accessPolicy.getStart());
    converted.setPermissions(accessPolicy.getPermission());
    return converted;
}
/**
* Sets stored {@link TableAccessPolicies access policies} for the table that may be used with Shared Access
* Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Sets stored {@link TableAccessPolicies access policies} on a table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.setAccessPolicies
* <pre>
* List<TableSignedIdentifier> signedIdentifiers = new ArrayList<>&
*
* signedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
* signedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
*
* tableClient.setAccessPolicies&
*
* System.out.print&
* </pre>
* <!-- end com.azure.data.tables.tableClient.setAccessPolicies
*
* @param tableSignedIdentifiers The {@link TableSignedIdentifier access policies} for the table.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void setAccessPolicies(List<TableSignedIdentifier> tableSignedIdentifiers) {
// Delegates to the response-returning overload with no timeout or context; the response is discarded.
setAccessPoliciesWithResponse(tableSignedIdentifiers, null, null);
}
/**
* Sets stored {@link TableAccessPolicies access policies} for the table that may be used with Shared Access
* Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Sets stored {@link TableAccessPolicies access policies} on a table. Prints out details of the
* {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.setAccessPoliciesWithResponse
* <pre>
* List<TableSignedIdentifier> mySignedIdentifiers = new ArrayList<>&
*
* mySignedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
* mySignedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
*
* Response<Void> response = tableClient.setAccessPoliciesWithResponse&
* new Context&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.setAccessPoliciesWithResponse
*
* @param tableSignedIdentifiers The {@link TableSignedIdentifier access policies} for the table.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> setAccessPoliciesWithResponse(List<TableSignedIdentifier> tableSignedIdentifiers,
    Duration timeout, Context context) {
    final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
    long timeoutInMillis = setTimeout(timeout);
    Context contextValue = setContext(context);
    // Convert the public-model identifiers to the generated service model; start/expiry timestamps
    // are truncated to whole seconds before being sent. A null input list stays null.
    List<SignedIdentifier> signedIdentifiers = null;
    if (tableSignedIdentifiers != null) {
        signedIdentifiers = new ArrayList<>();
        for (TableSignedIdentifier tableSignedIdentifier : tableSignedIdentifiers) {
            SignedIdentifier signedIdentifier = toSignedIdentifier(tableSignedIdentifier);
            if (signedIdentifier != null) {
                AccessPolicy policy = signedIdentifier.getAccessPolicy();
                if (policy != null && policy.getStart() != null) {
                    policy.setStart(policy.getStart().truncatedTo(ChronoUnit.SECONDS));
                }
                if (policy != null && policy.getExpiry() != null) {
                    policy.setExpiry(policy.getExpiry().truncatedTo(ChronoUnit.SECONDS));
                }
            }
            signedIdentifiers.add(signedIdentifier);
        }
    }
    List<SignedIdentifier> finalSignedIdentifiers = signedIdentifiers;
    Callable<Response<Void>> setPoliciesOp = () -> {
        ResponseBase<TablesSetAccessPolicyHeaders, Void> response = tablesImplementation.getTables()
            .setAccessPolicyWithResponse(tableName, null, null, finalSignedIdentifiers, contextValue);
        return new SimpleResponse<>(response, response.getValue());
    };
    // Run the service call on a single-use scheduler so the optional client-side timeout can be enforced.
    ScheduledFuture<Response<Void>> future =
        executor.schedule(setPoliciesOp, IMMEDIATELY, TimeUnit.SECONDS);
    executor.shutdown();
    try {
        return future.get(timeoutInMillis, TimeUnit.MILLISECONDS);
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
// Converts a public-model TableSignedIdentifier to the generated service-model SignedIdentifier.
// A null input maps straight through to null.
private SignedIdentifier toSignedIdentifier(TableSignedIdentifier tableSignedIdentifier) {
    return tableSignedIdentifier == null
        ? null
        : new SignedIdentifier()
            .setId(tableSignedIdentifier.getId())
            .setAccessPolicy(toAccessPolicy(tableSignedIdentifier.getAccessPolicy()));
}
// Converts a public-model TableAccessPolicy to the generated service-model AccessPolicy.
private AccessPolicy toAccessPolicy(TableAccessPolicy tableAccessPolicy) {
    if (tableAccessPolicy == null) {
        return null;
    }
    // Populate the service model from the public model field by field.
    AccessPolicy converted = new AccessPolicy();
    converted.setExpiry(tableAccessPolicy.getExpiresOn());
    converted.setStart(tableAccessPolicy.getStartsOn());
    converted.setPermission(tableAccessPolicy.getPermissions());
    return converted;
}
/**
* Executes all {@link TableTransactionAction actions} within the list inside a transaction. When the call
* completes, either all {@link TableTransactionAction actions} in the transaction will succeed, or if a failure
* occurs, all {@link TableTransactionAction actions} in the transaction will be rolled back.
 * {@link TableTransactionAction Actions} are executed sequentially. Each {@link TableTransactionAction action}
* must operate on a distinct row key. Attempting to pass multiple {@link TableTransactionAction actions} that
* share the same row key will cause an error.
*
* <p><strong>Code Samples</strong></p>
* <p>Submits a transaction that contains multiple {@link TableTransactionAction actions} to be applied to
* {@link TableEntity entities} on a table. Prints out details of each {@link TableTransactionAction action}'s
* {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransaction
* <pre>
* List<TableTransactionAction> transactionActions = new ArrayList<>&
*
* String partitionKey = "markers";
* String firstEntityRowKey = "m001";
* String secondEntityRowKey = "m002";
*
* TableEntity firstEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* transactionActions.add&
*
* System.out.printf&
* firstEntityRowKey&
*
* TableEntity secondEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* transactionActions.add&
*
* System.out.printf&
* secondEntityRowKey&
*
* TableTransactionResult tableTransactionResult = tableClient.submitTransaction&
*
* System.out.print&
*
* tableTransactionResult.getTransactionActionResponses&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransaction
* <p>Shows how to handle a transaction with a failing {@link TableTransactionAction action} via the provided
* {@link TableTransactionFailedException exception}, which contains the index of the first failing action in the
* transaction.</p>
* <!-- src_embed com.azure.data.tables.tableAsyncClient.submitTransactionWithError
* <pre>
*
* tableAsyncClient.submitTransaction&
* .contextWrite&
* .doOnError&
* &
* int failedActionIndex = e.getFailedTransactionActionIndex&
* &
* &
* transactionActions.remove&
* &
* &
* .subscribe&
* System.out.print&
*
* tableTransactionResult.getTransactionActionResponses&
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.data.tables.tableAsyncClient.submitTransactionWithError
*
* @param transactionActions A {@link List} of {@link TableTransactionAction actions} to perform on
* {@link TableEntity entities} in a table.
*
* @return A {@link List} of {@link TableTransactionActionResponse sub-responses} that correspond to each
* {@link TableTransactionResult action} in the transaction.
*
* @throws IllegalArgumentException If no {@link TableTransactionAction actions} have been added to the list.
* @throws TableServiceException If the request is rejected by the service.
* @throws TableTransactionFailedException If any {@link TableTransactionResult action} within the transaction
* fails. See the documentation for the client methods in {@link TableClient} to understand the conditions that
* may cause a given {@link TableTransactionAction action} to fail.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public TableTransactionResult submitTransaction(List<TableTransactionAction> transactionActions) {
    // Delegate to the response-returning overload with no timeout or context and unwrap the result.
    Response<TableTransactionResult> response = submitTransactionWithResponse(transactionActions, null, null);
    return response.getValue();
}
/**
* Executes all {@link TableTransactionAction actions} within the list inside a transaction. When the call
* completes, either all {@link TableTransactionAction actions} in the transaction will succeed, or if a failure
* occurs, all {@link TableTransactionAction actions} in the transaction will be rolled back.
 * {@link TableTransactionAction Actions} are executed sequentially. Each {@link TableTransactionAction action}
* must operate on a distinct row key. Attempting to pass multiple {@link TableTransactionAction actions} that
* share the same row key will cause an error.
*
* <p><strong>Code Samples</strong></p>
* <p>Submits a transaction that contains multiple {@link TableTransactionAction actions} to be applied to
* {@link TableEntity entities} on a table. Prints out details of the {@link Response HTTP response} for the
* operation, as well as each {@link TableTransactionAction action}'s corresponding {@link Response HTTP
* response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransactionWithResponse
* <pre>
* List<TableTransactionAction> myTransactionActions = new ArrayList<>&
*
* String myPartitionKey = "markers";
* String myFirstEntityRowKey = "m001";
* String mySecondEntityRowKey = "m002";
*
* TableEntity myFirstEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* myTransactionActions.add&
*
* System.out.printf&
* myFirstEntityRowKey&
*
* TableEntity mySecondEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* myTransactionActions.add&
*
* System.out.printf&
* mySecondEntityRowKey&
*
* Response<TableTransactionResult> response = tableClient.submitTransactionWithResponse&
* Duration.ofSeconds&
*
* System.out.printf&
* + " actions are:", response.getStatusCode&
*
* response.getValue&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransactionWithResponse
* <p>Shows how to handle a transaction with a failing {@link TableTransactionAction action} via the provided
* {@link TableTransactionFailedException exception}, which contains the index of the first failing action in the
* transaction.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransactionWithResponseWithError
* <pre>
* try &
* Response<TableTransactionResult> transactionResultResponse =
* tableClient.submitTransactionWithResponse&
* new Context&
*
* System.out.printf&
* + " submitted actions are:", transactionResultResponse.getStatusCode&
*
* transactionResultResponse.getValue&
* .forEach&
* System.out.printf&
* &
* &
* int failedActionIndex = e.getFailedTransactionActionIndex&
* &
* &
* myTransactionActions.remove&
* &
* &
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransactionWithResponseWithError
*
* @param transactionActions A {@link List} of {@link TableTransactionAction transaction actions} to perform on
* {@link TableEntity entities} in a table.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return An {@link Response HTTP response} produced for the transaction itself. The response's value will contain
* a {@link List} of {@link TableTransactionActionResponse sub-responses} that correspond to each
* {@link TableTransactionAction action} in the transaction.
*
* @throws IllegalArgumentException If no {@link TableTransactionAction actions} have been added to the list.
* @throws TableServiceException If the request is rejected by the service.
* @throws TableTransactionFailedException If any {@link TableTransactionAction action} within the transaction
* fails. See the documentation for the client methods in {@link TableClient} to understand the conditions that
* may cause a given {@link TableTransactionAction action} to fail.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableTransactionResult> submitTransactionWithResponse(List<TableTransactionAction> transactionActions, Duration timeout, Context context) {
// Single-thread scheduler used purely to enforce the optional client-side timeout on the call.
final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
long timeoutInMillis = setTimeout(timeout);
Context contextValue = setContext(context);
// An empty transaction is rejected up front. NOTE(review): a null list throws NullPointerException
// here rather than IllegalArgumentException — confirm whether that is intended.
if (transactionActions.isEmpty()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("A transaction must contain at least one operation."));
}
// Translate each public TableTransactionAction into the internal TransactionalBatchAction model.
final List<TransactionalBatchAction> operations = new ArrayList<>();
for (TableTransactionAction transactionAction : transactionActions) {
switch (transactionAction.getActionType()) {
case CREATE:
operations.add(new TransactionalBatchAction.CreateEntity(transactionAction.getEntity()));
break;
case UPSERT_MERGE:
operations.add(new TransactionalBatchAction.UpsertEntity(transactionAction.getEntity(),
TableEntityUpdateMode.MERGE));
break;
case UPSERT_REPLACE:
operations.add(new TransactionalBatchAction.UpsertEntity(transactionAction.getEntity(),
TableEntityUpdateMode.REPLACE));
break;
case UPDATE_MERGE:
operations.add(new TransactionalBatchAction.UpdateEntity(transactionAction.getEntity(),
TableEntityUpdateMode.MERGE, transactionAction.getIfUnchanged()));
break;
case UPDATE_REPLACE:
operations.add(new TransactionalBatchAction.UpdateEntity(transactionAction.getEntity(),
TableEntityUpdateMode.REPLACE, transactionAction.getIfUnchanged()));
break;
case DELETE:
operations.add(
new TransactionalBatchAction.DeleteEntity(transactionAction.getEntity(),
transactionAction.getIfUnchanged()));
break;
default:
// Unrecognized action types are silently skipped.
break;
}
}
Callable<Response<TableTransactionResult>> submitTransactionOp = () -> {
// Fold each per-action HTTP request into a single multipart transactional batch request body.
BiConsumer<TransactionalBatchRequestBody, RequestActionPair> accumulator = (body, pair) ->
body.addChangeOperation(new TransactionalBatchSubRequest(pair.getAction(), pair.getRequest()));
BiConsumer<TransactionalBatchRequestBody, TransactionalBatchRequestBody> combiner = (body1, body2) ->
body2.getContents().forEach(req -> body1.addChangeOperation((TransactionalBatchSubRequest) req));
TransactionalBatchRequestBody requestBody =
operations.stream()
.map(op -> new RequestActionPair(op.prepareRequest(transactionalBatchClient), op))
.collect(TransactionalBatchRequestBody::new, accumulator, combiner);
ResponseBase<TransactionalBatchSubmitBatchHeaders, TableTransactionActionResponse[]> response =
transactionalBatchImplementation
.submitTransactionalBatchWithRestResponse(requestBody, null, contextValue);
// parseResponse throws when any sub-response reports a failure; otherwise returns all sub-responses.
Response<List<TableTransactionActionResponse>> parsedResponse = parseResponse(requestBody, response);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
new TableTransactionResult(transactionActions, parsedResponse.getValue()));
};
ScheduledFuture<Response<TableTransactionResult>> scheduledFuture =
scheduler.schedule(submitTransactionOp, IMMEDIATELY, TimeUnit.SECONDS);
scheduler.shutdown();
try {
return scheduledFuture.get(timeoutInMillis, TimeUnit.MILLISECONDS);
}
catch (Exception ex) {
// Unwrap ExecutionException so the failure surfaced to the caller is the underlying cause.
Exception exception = ex;
if (exception instanceof ExecutionException) {
exception = (Exception) exception.getCause();
}
// parseResponse wraps TableTransactionFailedException inside a RuntimeException; rethrow the
// original transaction failure when present, otherwise map to a TableServiceException.
if (exception.getCause() instanceof TableTransactionFailedException) {
throw logger.logExceptionAsError((TableTransactionFailedException) exception.getCause());
} else {
throw logger.logExceptionAsError((RuntimeException)(mapThrowableToTableServiceException(exception)));
}
}
}
// Immutable pairing of a prepared HTTP request with the batch action that produced it, used while
// assembling the multipart transactional batch request body.
private static class RequestActionPair {
private final HttpRequest request;
private final TransactionalBatchAction action;
public RequestActionPair(HttpRequest request, TransactionalBatchAction action) {
this.request = request;
this.action = action;
}
public HttpRequest getRequest() {
return request;
}
public TransactionalBatchAction getAction() {
return action;
}
}
/**
 * Inspects the sub-responses of a transactional batch call, correlating each one with the request
 * that produced it. If any sub-response reports a failure (status code >= 400), a
 * {@link RuntimeException} wrapping a {@link TableTransactionFailedException} is thrown; otherwise
 * the list of sub-responses is returned.
 *
 * @param requestBody The batch request body whose change set produced the sub-responses.
 * @param response The raw batch response returned by the service.
 *
 * @return The per-action sub-responses when the whole transaction succeeded.
 */
private Response<List<TableTransactionActionResponse>> parseResponse(TransactionalBatchRequestBody requestBody,
    ResponseBase<TransactionalBatchSubmitBatchHeaders, TableTransactionActionResponse[]> response) {
    TableServiceError error = null;
    String errorMessage = null;
    TransactionalBatchChangeSet changes = null;
    TransactionalBatchAction failedAction = null;
    Integer failedIndex = null;
    if (requestBody.getContents().get(0) instanceof TransactionalBatchChangeSet) {
        changes = (TransactionalBatchChangeSet) requestBody.getContents().get(0);
    }
    for (int i = 0; i < response.getValue().length; i++) {
        TableTransactionActionResponse subResponse = response.getValue()[i];
        // Attach the originating HTTP request to each sub-response so callers can correlate them.
        if (changes != null && changes.getContents().get(i) != null) {
            ModelHelper.updateTableTransactionActionResponse(subResponse,
                changes.getContents().get(i).getHttpRequest());
        }
        // Only the first failing sub-response is recorded.
        if (subResponse.getStatusCode() >= 400 && error == null && errorMessage == null) {
            if (subResponse.getValue() instanceof TableServiceError) {
                error = (TableServiceError) subResponse.getValue();
                if (changes != null && error.getOdataError() != null
                    && error.getOdataError().getMessage() != null
                    && error.getOdataError().getMessage().getValue() != null) {
                    // The service error message is expected to start with "<failedIndex>:".
                    String message = error.getOdataError().getMessage().getValue();
                    try {
                        failedIndex = Integer.parseInt(message.substring(0, message.indexOf(":")));
                        failedAction = changes.getContents().get(failedIndex).getOperation();
                    } catch (NumberFormatException | IndexOutOfBoundsException ignored) {
                        // Best effort: a message without a leading colon-delimited index made
                        // indexOf(":") return -1, so substring threw StringIndexOutOfBoundsException
                        // (which the original NumberFormatException-only catch let escape). Report
                        // the failure without identifying the exact action instead.
                    }
                }
            } else if (subResponse.getValue() instanceof String) {
                errorMessage = "The service returned the following data for the failed operation: "
                    + subResponse.getValue();
            } else {
                errorMessage =
                    "The service returned the following status code for the failed operation: "
                        + subResponse.getStatusCode();
            }
        }
    }
    if (error != null || errorMessage != null) {
        String message = "An action within the operation failed, the transaction has been rolled back.";
        if (failedAction != null) {
            message += " The failed operation was: " + failedAction;
        } else if (errorMessage != null) {
            message += " " + errorMessage;
        }
        // Wrapped in RuntimeException; submitTransactionWithResponse unwraps it via getCause().
        throw logger.logExceptionAsError(new RuntimeException(
            new TableTransactionFailedException(message, null, toTableServiceError(error), failedIndex)));
    } else {
        return new SimpleResponse<>(response, Arrays.asList(response.getValue()));
    }
}
// Doubles up single quotes so the value can be embedded in a quoted OData key segment.
// A null input passes through unchanged.
private String escapeSingleQuotes(String input) {
    return input == null ? null : input.replace("'", "''");
}
} | class TableClient {
// Shared executor (registered with a shutdown hook) used to bound calls with a client-side timeout.
private static final ExecutorService THREAD_POOL = TableUtils.getThreadPoolWithShutdownHook();
private static final String DELIMITER_CONTINUATION_TOKEN = ";";
private final ClientLogger logger = new ClientLogger(TableClient.class);
// Name of the table this client targets.
private final String tableName;
// Generated implementation through which all REST calls are made.
private final AzureTableImpl tablesImplementation;
private final TransactionalBatchImpl transactionalBatchImplementation;
private final String accountName;
private final String tableEndpoint;
private final HttpPipeline pipeline;
// Secondary client used to prepare the individual requests of a transactional batch.
private final TableClient transactionalBatchClient;
TableClient(String tableName, HttpPipeline pipeline, String serviceUrl, TableServiceVersion serviceVersion,
    SerializerAdapter tablesSerializer, SerializerAdapter transactionalBatchSerializer) {
    // Validate the table name and derive the account name plus table endpoint from the service URL;
    // any validation failure is logged before being rethrown.
    try {
        if (tableName == null) {
            throw new NullPointerException("'tableName' must not be null to create TableClient.");
        }
        if (tableName.isEmpty()) {
            throw new IllegalArgumentException("'tableName' must not be empty to create a TableClient.");
        }
        final URI serviceUri = URI.create(serviceUrl);
        this.accountName = serviceUri.getHost().split("\\.", 2)[0];
        this.tableEndpoint = serviceUri.resolve("/" + tableName).toString();
        logger.verbose("Table Service URI: {}", serviceUri);
    } catch (NullPointerException | IllegalArgumentException ex) {
        throw logger.logExceptionAsError(ex);
    }
    this.tableName = tableName;
    this.tablesImplementation = new AzureTableImplBuilder()
        .url(serviceUrl)
        .serializerAdapter(tablesSerializer)
        .pipeline(pipeline)
        .version(serviceVersion.getVersion())
        .buildClient();
    this.transactionalBatchImplementation =
        new TransactionalBatchImpl(tablesImplementation, transactionalBatchSerializer);
    this.pipeline = tablesImplementation.getHttpPipeline();
    // Dedicated client instance used to prepare the per-action requests of a transactional batch.
    this.transactionalBatchClient = new TableClient(this, serviceVersion, tablesSerializer);
}
// Copy constructor used to create the secondary client for transactional batches. It reuses the
// parent client's identity (account name, endpoint, table name) but builds its implementation on a
// "null client" pipeline (see BuilderHelper.buildNullClientPipeline — presumably one that does not
// send requests; confirm), and leaves the batch-specific fields unset to avoid recursion.
TableClient(TableClient client, ServiceVersion serviceVersion, SerializerAdapter tablesSerializer) {
this.accountName = client.getAccountName();
this.tableEndpoint = client.getTableEndpoint();
this.pipeline = BuilderHelper.buildNullClientPipeline();
this.tablesImplementation = new AzureTableImplBuilder()
.url(client.getTablesImplementation().getUrl())
.serializerAdapter(tablesSerializer)
.pipeline(this.pipeline)
.version(serviceVersion.getVersion())
.buildClient();
this.tableName = client.getTableName();
this.transactionalBatchImplementation = null;
this.transactionalBatchClient = null;
}
/**
 * Gets the name of the table.
 *
 * @return The table's name.
 */
public String getTableName() {
    return this.tableName;
}

/**
 * Gets the name of the account that contains the table.
 *
 * @return The account's name.
 */
public String getAccountName() {
    return this.accountName;
}

/**
 * Gets the endpoint for this table.
 *
 * @return This table's endpoint.
 */
public String getTableEndpoint() {
    return this.tableEndpoint;
}

// Package-private accessor for the underlying HTTP pipeline.
HttpPipeline getHttpPipeline() {
    return this.pipeline;
}

/**
 * Gets the {@link AzureTableImpl} powering this client.
 *
 * @return This client's {@link AzureTableImpl}.
 */
AzureTableImpl getTablesImplementation() {
    return this.tablesImplementation;
}

/**
 * Gets the REST API version used by this client.
 *
 * @return The REST API version in use.
 */
public TableServiceVersion getServiceVersion() {
    return TableServiceVersion.fromString(this.tablesImplementation.getVersion());
}
/**
* Generates a service SAS for the table using the specified {@link TableSasSignatureValues}.
*
* <p><strong>Note:</strong> The client must be authenticated via {@link AzureNamedKeyCredential}.</p>
* <p>See {@link TableSasSignatureValues} for more information on how to construct a service SAS.</p>
*
* @param tableSasSignatureValues {@link TableSasSignatureValues}.
*
* @return A {@code String} representing the SAS query parameters.
*
* @throws IllegalStateException If this {@link TableClient} is not authenticated with an
* {@link AzureNamedKeyCredential}.
*/
public String generateSas(TableSasSignatureValues tableSasSignatureValues) {
    // SAS generation requires the named key credential that authenticated this client's pipeline.
    AzureNamedKeyCredential credential = TableSasUtils.extractNamedKeyCredential(getHttpPipeline());
    if (credential == null) {
        throw logger.logExceptionAsError(new IllegalStateException("Cannot generate a SAS token with a client that"
            + " is not authenticated with an AzureNamedKeyCredential."));
    }
    return new TableSasGenerator(tableSasSignatureValues, getTableName(), credential).getSas();
}
/**
* Creates the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a table. Prints out the details of the created table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.createTable -->
* <pre>
* TableItem tableItem = tableClient.createTable&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.createTable -->
*
* @return A {@link TableItem} that represents the table.
*
* @throws TableServiceException If a table with the same name already exists within the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public TableItem createTable() {
// Delegates to the response-returning overload with no timeout or context and unwraps the value.
return createTableWithResponse(null, null).getValue();
}
/**
* Creates the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Creates a table. Prints out the details of the {@link Response HTTP response} and the created table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.createTableWithResponse
* <pre>
* Response<TableItem> response = tableClient.createTableWithResponse&
* new Context&
*
* System.out.printf&
* response.getStatusCode&
* </pre>
* <!-- end com.azure.data.tables.tableClient.createTableWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response} containing a {@link TableItem} that represents the table.
*
* @throws TableServiceException If a table with the same name already exists within the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Deletes the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteTable -->
* <pre>
* tableClient.deleteTable&
*
* System.out.print&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteTable -->
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void deleteTable() {
// Delegates to the response-returning overload with no timeout or context; the response is discarded.
deleteTableWithResponse(null, null);
}
/**
* Deletes the table within the Tables service.
*
* <p><strong>Code Samples</strong></p>
* <p>Deletes a table. Prints out the details of the {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.deleteTableWithResponse
* <pre>
* Response<Void> response = tableClient.deleteTableWithResponse&
* new Context&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.deleteTableWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteTableWithResponse(Duration timeout, Context context) {
Context contextValue = TableUtils.setContext(context, true);
OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
Callable<Response<Void>> callable = () ->
new SimpleResponse<>(tablesImplementation.getTables().deleteWithResponse(
tableName, null, contextValue),
null);
// When a timeout was supplied, the call runs on the shared thread pool so it can be bounded;
// otherwise it executes synchronously on the calling thread.
try {
return timeoutInMillis.isPresent()
? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
: callable.call();
} catch (Exception ex) {
// A 404 (table not found) is swallowed and reported as success; see swallow404Exception.
Throwable except = mapThrowableToTableServiceException(ex);
return swallow404Exception(except);
}
}
/**
 * Converts a 404 {@link TableServiceException} into a successful empty {@link Response}, since a "not found"
 * result on a delete means the resource is already gone. Any other throwable is logged and rethrown.
 *
 * @param ex The throwable raised by the service call (already mapped to a table service exception by the caller).
 *
 * @return An empty {@link Response} carrying the original request, status code and headers when {@code ex} is a
 * 404 {@link TableServiceException}.
 *
 * @throws RuntimeException When {@code ex} is not a 404 {@link TableServiceException}.
 */
private Response<Void> swallow404Exception(Throwable ex) {
    if (ex instanceof TableServiceException) {
        // Cast once instead of repeating '((TableServiceException) ex)' for every accessor.
        TableServiceException serviceException = (TableServiceException) ex;
        if (serviceException.getResponse().getStatusCode() == 404) {
            return new SimpleResponse<>(
                serviceException.getResponse().getRequest(),
                serviceException.getResponse().getStatusCode(),
                serviceException.getResponse().getHeaders(),
                null);
        }
    }
    throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
}
/**
 * Inserts a {@link TableEntity entity} into the table.
 *
 * @param entity The {@link TableEntity entity} to insert.
 *
 * @throws TableServiceException If an entity with the same partition key and row key already exists within the
 * table.
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void createEntity(TableEntity entity) {
    // Discard the HTTP response; default timeout and context.
    createEntityWithResponse(entity, null, null);
}
/**
 * Inserts a {@link TableEntity entity} into the table.
 *
 * @param entity The {@link TableEntity entity} to insert.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return The {@link Response HTTP response}.
 *
 * @throws TableServiceException If an entity with the same partition key and row key already exists within the
 * table.
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> createEntityWithResponse(TableEntity entity, Duration timeout, Context context) {
    // Validate arguments before computing context/timeout or touching the entity.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = TableUtils.setContext(context, true);
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    EntityHelper.setPropertiesFromGetters(entity, logger);
    Callable<Response<Void>> callable = () -> {
        Response<Map<String, Object>> response = tablesImplementation.getTables().insertEntityWithResponse(
            tableName, null, null, ResponseFormat.RETURN_NO_CONTENT,
            entity.getProperties(), null, contextValue);
        // The service is asked to return no content, so rewrap without a body.
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null);
    };
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
/**
 * Inserts a {@link TableEntity entity} into the table if it does not exist, or merges the entity with the
 * existing entity otherwise.
 *
 * @param entity The {@link TableEntity entity} to upsert.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void upsertEntity(TableEntity entity) {
    // Default update mode, timeout and context; discard the HTTP response.
    upsertEntityWithResponse(entity, null, null, null);
}
/**
 * Inserts a {@link TableEntity entity} into the table if it does not exist, or updates the existing entity using
 * the specified {@link TableEntityUpdateMode update mode} otherwise.
 *
 * <p>With {@link TableEntityUpdateMode#REPLACE} the provided entity's properties completely replace those in the
 * existing entity; with any other mode the provided properties are merged into the existing entity.</p>
 *
 * @param entity The {@link TableEntity entity} to upsert.
 * @param updateMode The type of update to perform if the entity already exists.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return The {@link Response HTTP response}.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> upsertEntityWithResponse(TableEntity entity, TableEntityUpdateMode updateMode,
                                               Duration timeout, Context context) {
    // Validate arguments before computing context/timeout or touching the entity.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = TableUtils.setContext(context, true);
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    String partitionKey = TableUtils.escapeSingleQuotes(entity.getPartitionKey());
    String rowKey = TableUtils.escapeSingleQuotes(entity.getRowKey());
    EntityHelper.setPropertiesFromGetters(entity, logger);
    Callable<Response<Void>> callable = () -> {
        // REPLACE swaps the stored properties wholesale; the default path merges.
        if (updateMode == TableEntityUpdateMode.REPLACE) {
            return tablesImplementation.getTables().updateEntityWithResponse(
                tableName, partitionKey, rowKey, null, null, null,
                entity.getProperties(), null, contextValue);
        } else {
            return tablesImplementation.getTables().mergeEntityWithResponse(
                tableName, partitionKey, rowKey, null, null, null,
                entity.getProperties(), null, contextValue);
        }
    };
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
/**
 * Updates an existing {@link TableEntity entity} by merging the provided entity with the existing entity.
 *
 * @param entity The {@link TableEntity entity} to update.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If no entity with the same partition key and row key exists within the table.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void updateEntity(TableEntity entity) {
    // Null update mode falls through to the default (merge) behavior.
    updateEntity(entity, null);
}
/**
 * Updates an existing {@link TableEntity entity} using the specified {@link TableEntityUpdateMode update mode}.
 *
 * <p>With {@link TableEntityUpdateMode#REPLACE} the provided entity's properties completely replace those in the
 * existing entity; with any other mode the provided properties are merged into the existing entity.</p>
 *
 * @param entity The {@link TableEntity entity} to update.
 * @param updateMode The type of update to perform.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If no entity with the same partition key and row key exists within the table.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void updateEntity(TableEntity entity, TableEntityUpdateMode updateMode) {
    // Unconditional update (no ETag check), default timeout and context; discard the HTTP response.
    updateEntityWithResponse(entity, updateMode, false, null, null);
}
/**
 * Updates an existing {@link TableEntity entity} using the specified {@link TableEntityUpdateMode update mode}.
 *
 * <p>With {@link TableEntityUpdateMode#REPLACE} the provided entity's properties completely replace those in the
 * existing entity; with any other mode the provided properties are merged into the existing entity.</p>
 *
 * @param entity The {@link TableEntity entity} to update.
 * @param updateMode The type of update to perform.
 * @param ifUnchanged When true, the ETag of the provided entity must match the ETag of the entity in the Table
 * service. If the values do not match, the update will not occur and an exception will be thrown.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return The {@link Response HTTP response}.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If no entity with the same partition key and row key exists within the table, or
 * if {@code ifUnchanged} is {@code true} and the existing entity's ETag does not match that of the provided
 * entity.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> updateEntityWithResponse(TableEntity entity, TableEntityUpdateMode updateMode,
                                               boolean ifUnchanged, Duration timeout, Context context) {
    // Validate arguments before computing context/timeout or touching the entity.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    Context contextValue = TableUtils.setContext(context, true);
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    String partitionKey = TableUtils.escapeSingleQuotes(entity.getPartitionKey());
    String rowKey = TableUtils.escapeSingleQuotes(entity.getRowKey());
    // "*" matches any ETag, making the update unconditional when ifUnchanged is false.
    String eTag = ifUnchanged ? entity.getETag() : "*";
    EntityHelper.setPropertiesFromGetters(entity, logger);
    Callable<Response<Void>> callable = () -> {
        if (updateMode == TableEntityUpdateMode.REPLACE) {
            return tablesImplementation.getTables()
                .updateEntityWithResponse(tableName, partitionKey, rowKey, null, null, eTag,
                    entity.getProperties(), null, contextValue);
        } else {
            return tablesImplementation.getTables()
                .mergeEntityWithResponse(tableName, partitionKey, rowKey, null, null, eTag,
                    entity.getProperties(), null, contextValue);
        }
    };
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
/**
 * Deletes an {@link TableEntity entity} from the table.
 *
 * @param partitionKey The partition key of the {@link TableEntity entity}.
 * @param rowKey The row key of the {@link TableEntity entity}.
 *
 * @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
 * empty.
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void deleteEntity(String partitionKey, String rowKey) {
    // No ETag => unconditional delete; default timeout and context.
    deleteEntityWithResponse(partitionKey, rowKey, null, false, null, null);
}
/**
 * Deletes an {@link TableEntity entity} from the table.
 *
 * @param entity The {@link TableEntity entity} to delete.
 *
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public void deleteEntity(TableEntity entity) {
    // Unconditional delete (no ETag check), default timeout and context; discard the HTTP response.
    deleteEntityWithResponse(entity, false, null, null);
}
/**
 * Deletes an {@link TableEntity entity} from the table.
 *
 * @param entity The table {@link TableEntity entity} to delete.
 * @param ifUnchanged When true, the ETag of the provided entity must match the ETag of the entity in the Table
 * service. If the values do not match, the update will not occur and an exception will be thrown.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return The {@link Response HTTP response}.
 *
 * @throws IllegalArgumentException If the provided entity is {@code null}.
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteEntityWithResponse(TableEntity entity, boolean ifUnchanged, Duration timeout,
                                               Context context) {
    // Explicit argument check instead of an opaque NPE on entity.getPartitionKey(); consistent with
    // createEntityWithResponse/upsertEntityWithResponse.
    if (entity == null) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'entity' cannot be null."));
    }
    return deleteEntityWithResponse(
        entity.getPartitionKey(), entity.getRowKey(), entity.getETag(), ifUnchanged, timeout, context);
}
/**
 * Deletes a single entity identified by its partition and row key, optionally conditioned on an ETag.
 * A 404 from the service (entity already gone) is swallowed and returned as a successful response.
 *
 * @param partitionKey The partition key of the entity; must be non-null and non-empty.
 * @param rowKey The row key of the entity; must be non-null and non-empty.
 * @param eTag The ETag to match when {@code ifUnchanged} is true.
 * @param ifUnchanged When false, the wildcard ETag "*" is sent so the delete is unconditional.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} passed through the HTTP pipeline during the service call.
 *
 * @return The {@link Response HTTP response}.
 *
 * @throws IllegalArgumentException If {@code partitionKey} or {@code rowKey} is {@code null} or empty.
 */
private Response<Void> deleteEntityWithResponse(String partitionKey, String rowKey, String eTag, boolean ifUnchanged,
    Duration timeout, Context context) {
    // Validate arguments before computing context/timeout.
    if (isNullOrEmpty(partitionKey) || isNullOrEmpty(rowKey)) {
        throw logger.logExceptionAsError(new IllegalArgumentException("'partitionKey' and 'rowKey' cannot be null"));
    }
    Context contextValue = TableUtils.setContext(context, true);
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    String finalETag = ifUnchanged ? eTag : "*";
    Callable<Response<Void>> callable = () -> tablesImplementation.getTables().deleteEntityWithResponse(
        tableName, TableUtils.escapeSingleQuotes(partitionKey), TableUtils.escapeSingleQuotes(rowKey), finalETag, null,
        null, null, contextValue);
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        // Treat 404 as success: the entity is already absent.
        return swallow404Exception(mapThrowableToTableServiceException(ex));
    }
}
/**
 * Lists all {@link TableEntity entities} within the table.
 *
 * @return A {@link PagedIterable} containing all {@link TableEntity entities} within the table.
 *
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<TableEntity> listEntities() {
    // Empty options => no filter, no select, no page-size cap.
    return listEntities(new ListEntitiesOptions(), null, null);
}
/**
 * Lists {@link TableEntity entities} using the parameters in the provided options.
 *
 * <p>If the {@code filter} parameter in the options is set, only entities matching the filter will be returned.
 * If the {@code select} parameter is set, only the properties included in the select parameter will be returned
 * for each entity. If the {@code top} parameter is set, the maximum number of returned entities per page will be
 * limited to that value.</p>
 *
 * @param options The {@code filter}, {@code select}, and {@code top} OData query options to apply to this
 * operation. May be {@code null}, in which case no options are applied.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return A {@link PagedIterable} containing matching {@link TableEntity entities} within the table.
 *
 * @throws IllegalArgumentException If one or more of the OData query options in {@code options} is malformed.
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<TableEntity> listEntities(ListEntitiesOptions options, Duration timeout, Context context) {
    // Defensive default: a null options object would otherwise NPE lazily when the first page is fetched.
    ListEntitiesOptions listOptions = options == null ? new ListEntitiesOptions() : options;
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    Callable<PagedIterable<TableEntity>> callable = () -> new PagedIterable<>(
        () -> listEntitiesFirstPage(context, listOptions, TableEntity.class),
        token -> listEntitiesNextPage(token, context, listOptions, TableEntity.class));
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
/**
 * Fetches the first page of a list-entities query: no continuation keys are supplied.
 */
private <T extends TableEntity> PagedResponse<T> listEntitiesFirstPage(Context context,
                                                                       ListEntitiesOptions options,
                                                                       Class<T> resultType) {
    return listEntities(null, null, context, options, resultType);
}
/**
 * Fetches a subsequent page of a list-entities query from a continuation token, which packs the next partition
 * key and next row key separated by {@code DELIMITER_CONTINUATION_TOKEN}.
 */
private <T extends TableEntity> PagedResponse<T> listEntitiesNextPage(String token, Context context,
                                                                      ListEntitiesOptions options,
                                                                      Class<T> resultType) {
    // A null token means there are no more pages.
    if (token == null) {
        return null;
    }
    // Limit 2 so a delimiter inside the row key is preserved.
    String[] continuationKeys = token.split(DELIMITER_CONTINUATION_TOKEN, 2);
    if (continuationKeys.length != 2) {
        throw logger.logExceptionAsError(new RuntimeException(
            "Split done incorrectly, must have partition and row key: " + token));
    }
    return listEntities(continuationKeys[0], continuationKeys[1], context, options, resultType);
}
/**
 * Executes one page of a list-entities query against the service and adapts the result into a
 * {@link PagedResponse}, converting each raw property map into the requested entity subclass.
 * Returns {@code null} when the service response carries no query payload.
 */
private <T extends TableEntity> PagedResponse<T> listEntities(String nextPartitionKey, String nextRowKey,
                                                              Context context, ListEntitiesOptions options,
                                                              Class<T> resultType) {
    Context contextValue = TableUtils.setContext(context, true);
    // The service expects the select projection as a single comma-separated string.
    String select = options.getSelect() == null ? null : String.join(",", options.getSelect());
    QueryOptions queryOptions = new QueryOptions()
        .setFilter(options.getFilter())
        .setTop(options.getTop())
        .setSelect(select)
        .setFormat(OdataMetadataFormat.APPLICATION_JSON_ODATA_FULLMETADATA);
    ResponseBase<TablesQueryEntitiesHeaders, TableEntityQueryResponse> response =
        tablesImplementation.getTables().queryEntitiesWithResponse(tableName, null, null,
            nextPartitionKey, nextRowKey, queryOptions, contextValue);
    TableEntityQueryResponse queryPayload = response.getValue();
    if (queryPayload == null || queryPayload.getValue() == null) {
        return null;
    }
    List<T> entities = queryPayload.getValue().stream()
        .map(properties -> EntityHelper.convertToSubclass(ModelHelper.createEntity(properties), resultType, logger))
        .collect(Collectors.toList());
    // Continuation keys from the response headers drive the next-page fetch.
    return new EntityPaged<>(response, entities,
        response.getDeserializedHeaders().getXMsContinuationNextPartitionKey(),
        response.getDeserializedHeaders().getXMsContinuationNextRowKey());
}
/**
 * Gets a single {@link TableEntity entity} from the table.
 *
 * @param partitionKey The partition key of the {@link TableEntity entity}.
 * @param rowKey The row key of the {@link TableEntity entity}.
 *
 * @return The {@link TableEntity entity}.
 *
 * @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
 * empty.
 * @throws TableServiceException If no entity with the provided {@code partitionKey} and {@code rowKey} exists
 * within the table.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public TableEntity getEntity(String partitionKey, String rowKey) {
    // No select projection, default timeout and context; unwrap the HTTP response.
    return getEntityWithResponse(partitionKey, rowKey, null, null, null).getValue();
}
/**
 * Gets a single {@link TableEntity entity} from the table.
 *
 * @param partitionKey The partition key of the {@link TableEntity entity}.
 * @param rowKey The row key of the {@link TableEntity entity}.
 * @param select A list of properties to select on the {@link TableEntity entity}.
 * @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
 * @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
 * the service call.
 *
 * @return The {@link Response HTTP response} containing the {@link TableEntity entity}.
 *
 * @throws IllegalArgumentException If the provided {@code partitionKey} or {@code rowKey} are {@code null} or
 * empty, or if the {@code select} OData query option is malformed.
 * @throws TableServiceException If no entity with the provided {@code partitionKey} and {@code rowKey} exists
 * within the table.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableEntity> getEntityWithResponse(String partitionKey, String rowKey, List<String> select,
                                                   Duration timeout, Context context) {
    // Validate arguments before computing context/timeout or building query options.
    if (isNullOrEmpty(partitionKey) || isNullOrEmpty(rowKey)) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'partitionKey' and 'rowKey' cannot be null."));
    }
    OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
    Context contextValue = TableUtils.setContext(context, true);
    QueryOptions queryOptions = new QueryOptions()
        .setFormat(OdataMetadataFormat.APPLICATION_JSON_ODATA_FULLMETADATA);
    if (select != null) {
        // The service expects the projection as a single comma-separated string.
        queryOptions.setSelect(String.join(",", select));
    }
    Callable<Response<TableEntity>> callable = () -> {
        ResponseBase<TablesQueryEntityWithPartitionAndRowKeyHeaders, Map<String, Object>> response =
            tablesImplementation.getTables().queryEntityWithPartitionAndRowKeyWithResponse(
                tableName, TableUtils.escapeSingleQuotes(partitionKey), TableUtils.escapeSingleQuotes(rowKey), null, null,
                queryOptions, contextValue);
        final Map<String, Object> matchingEntity = response.getValue();
        if (matchingEntity == null || matchingEntity.isEmpty()) {
            // NOTE(review): returning null here makes getEntity(...)'s .getValue() call NPE in this (rare)
            // empty-body case — confirm whether the service can actually return 2xx with no entity.
            logger.info("There was no matching entity. Table {}, partition key: {}, row key: {}.",
                tableName, partitionKey, rowKey);
            return null;
        }
        final TableEntity entity = ModelHelper.createEntity(matchingEntity);
        return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
            EntityHelper.convertToSubclass(entity, TableEntity.class, logger));
    };
    try {
        return timeoutInMillis.isPresent()
            ? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
            : callable.call();
    } catch (Exception ex) {
        throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
    }
}
/**
 * Retrieves details about any stored {@link TableAccessPolicies access policies} specified on the table that may
 * be used with Shared Access Signatures.
 *
 * <p>This operation is only supported on Azure Storage endpoints.</p>
 *
 * @return The table's {@link TableAccessPolicies access policies}.
 *
 * @throws TableServiceException If the request is rejected by the service.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public TableAccessPolicies getAccessPolicies() {
    // Default timeout and context; unwrap the HTTP response.
    return getAccessPoliciesWithResponse(null, null).getValue();
}
/**
* Retrieves details about any stored {@link TableAccessPolicies access policies} specified on the table that may be
* used with Shared Access Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Gets a table's {@link TableAccessPolicies access policies}. Prints out the details of the
* {@link Response HTTP response} and the retrieved {@link TableAccessPolicies access policies}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.getAccessPoliciesWithResponse
* <pre>
* List<String> propertiesToSelect = new ArrayList<>&
* propertiesToSelect.add&
* propertiesToSelect.add&
* propertiesToSelect.add&
*
* Response<TableAccessPolicies> response = tableClient.getAccessPoliciesWithResponse&
* new Context&
*
* System.out.printf&
* + " IDs:", response.getStatusCode&
*
* response.getValue&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.getAccessPoliciesWithResponse
*
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return An {@link Response HTTP response} containing the table's {@link TableAccessPolicies access policies}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableAccessPolicies> getAccessPoliciesWithResponse(Duration timeout, Context context) {
// Normalize the optional client-side timeout and caller-supplied context into the forms the
// generated service layer expects.
OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
Context contextValue = TableUtils.setContext(context, true);
// The service call is wrapped in a Callable so it can either run inline (no timeout) or be
// submitted to the shared thread pool and bounded by the timeout below.
Callable<Response<TableAccessPolicies>> callable = () -> {
ResponseBase<TablesGetAccessPolicyHeaders, List<SignedIdentifier>> response =
tablesImplementation.getTables().getAccessPolicyWithResponse(
tableName, null, null, contextValue
);
// Convert each generated SignedIdentifier into the public TableSignedIdentifier type; a
// null response body yields a TableAccessPolicies constructed with a null identifier list.
return new SimpleResponse<>(response,
new TableAccessPolicies(response.getValue() == null ? null : response.getValue().stream()
.map(TableUtils::toTableSignedIdentifier)
.collect(Collectors.toList())));
};
try {
return timeoutInMillis.isPresent()
? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
: callable.call();
} catch (Exception ex) {
// Any failure (service error, interruption, pool timeout) is mapped to an unchecked
// TableServiceException-style error and logged before rethrowing.
throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
}
}
/**
* Sets stored {@link TableAccessPolicies access policies} for the table that may be used with Shared Access
* Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Sets stored {@link TableAccessPolicies access policies} on a table.</p>
* <!-- src_embed com.azure.data.tables.tableClient.setAccessPolicies
* <pre>
* List<TableSignedIdentifier> signedIdentifiers = new ArrayList<>&
*
* signedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
* signedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
*
* tableClient.setAccessPolicies&
*
* System.out.print&
* </pre>
* <!-- end com.azure.data.tables.tableClient.setAccessPolicies
*
* @param tableSignedIdentifiers The {@link TableSignedIdentifier access policies} for the table.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void setAccessPolicies(List<TableSignedIdentifier> tableSignedIdentifiers) {
// Delegates to the WithResponse variant with no client-side timeout and no custom context;
// the HTTP response details are intentionally discarded.
setAccessPoliciesWithResponse(tableSignedIdentifiers, null, null);
}
/**
* Sets stored {@link TableAccessPolicies access policies} for the table that may be used with Shared Access
* Signatures.
*
* <p>This operation is only supported on Azure Storage endpoints.</p>
*
* <p><strong>Code Samples</strong></p>
* <p>Sets stored {@link TableAccessPolicies access policies} on a table. Prints out details of the
* {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.setAccessPoliciesWithResponse
* <pre>
* List<TableSignedIdentifier> mySignedIdentifiers = new ArrayList<>&
*
* mySignedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
* mySignedIdentifiers.add&
* .setAccessPolicy&
* .setStartsOn&
* .setExpiresOn&
* .setPermissions&
*
* Response<Void> response = tableClient.setAccessPoliciesWithResponse&
* new Context&
*
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.setAccessPoliciesWithResponse
*
* @param tableSignedIdentifiers The {@link TableSignedIdentifier access policies} for the table.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return The {@link Response HTTP response}.
*
* @throws TableServiceException If the request is rejected by the service.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> setAccessPoliciesWithResponse(List<TableSignedIdentifier> tableSignedIdentifiers,
Duration timeout, Context context) {
// Normalize the optional client-side timeout and caller-supplied context for the generated layer.
OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
Context contextValue = TableUtils.setContext(context, true);
// A null input list is passed through to the service as-is (clears stored policies on the
// service side — NOTE(review): presumably; confirm against the Set Table ACL REST semantics).
List<SignedIdentifier> signedIdentifiers = null;
if (tableSignedIdentifiers != null) {
signedIdentifiers = tableSignedIdentifiers.stream()
.map(tableSignedIdentifier -> {
// Convert the public model to the generated wire model.
SignedIdentifier signedIdentifier = TableUtils.toSignedIdentifier(tableSignedIdentifier);
if (signedIdentifier != null) {
// Truncate start/expiry to whole seconds before sending — presumably because the
// service stores access-policy times at second precision; TODO confirm.
if (signedIdentifier.getAccessPolicy() != null
&& signedIdentifier.getAccessPolicy().getStart() != null) {
signedIdentifier.getAccessPolicy()
.setStart(signedIdentifier.getAccessPolicy()
.getStart().truncatedTo(ChronoUnit.SECONDS));
}
if (signedIdentifier.getAccessPolicy() != null
&& signedIdentifier.getAccessPolicy().getExpiry() != null) {
signedIdentifier.getAccessPolicy()
.setExpiry(signedIdentifier.getAccessPolicy()
.getExpiry().truncatedTo(ChronoUnit.SECONDS));
}
}
return signedIdentifier;
})
.collect(Collectors.toList());
}
// Effectively-final copy so the lambda below can capture it.
List<SignedIdentifier> finalSignedIdentifiers = signedIdentifiers;
Callable<Response<Void>> callable = () -> {
ResponseBase<TablesSetAccessPolicyHeaders, Void> response = tablesImplementation.getTables()
.setAccessPolicyWithResponse(tableName, null, null,
finalSignedIdentifiers, contextValue);
return new SimpleResponse<>(response, response.getValue());
};
try {
// Run inline when no timeout was requested; otherwise bound the call via the shared pool.
return timeoutInMillis.isPresent()
? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
: callable.call();
} catch (Exception ex) {
// Normalize any failure to an unchecked service exception and log before rethrowing.
throw logger.logExceptionAsError((RuntimeException) (TableUtils.mapThrowableToTableServiceException(ex)));
}
}
/**
* Executes all {@link TableTransactionAction actions} within the list inside a transaction. When the call
* completes, either all {@link TableTransactionAction actions} in the transaction will succeed, or if a failure
* occurs, all {@link TableTransactionAction actions} in the transaction will be rolled back.
* {@link TableTransactionAction Actions} are executed sequentially. Each {@link TableTransactionAction action}
* must operate on a distinct row key. Attempting to pass multiple {@link TableTransactionAction actions} that
* share the same row key will cause an error.
*
* <p><strong>Code Samples</strong></p>
* <p>Submits a transaction that contains multiple {@link TableTransactionAction actions} to be applied to
* {@link TableEntity entities} on a table. Prints out details of each {@link TableTransactionAction action}'s
* {@link Response HTTP response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransaction
* <pre>
* List<TableTransactionAction> transactionActions = new ArrayList<>&
*
* String partitionKey = "markers";
* String firstEntityRowKey = "m001";
* String secondEntityRowKey = "m002";
*
* TableEntity firstEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* transactionActions.add&
*
* System.out.printf&
* firstEntityRowKey&
*
* TableEntity secondEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* transactionActions.add&
*
* System.out.printf&
* secondEntityRowKey&
*
* TableTransactionResult tableTransactionResult = tableClient.submitTransaction&
*
* System.out.print&
*
* tableTransactionResult.getTransactionActionResponses&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransaction
* <p>Shows how to handle a transaction with a failing {@link TableTransactionAction action} via the provided
* {@link TableTransactionFailedException exception}, which contains the index of the first failing action in the
* transaction.</p>
* <!-- src_embed com.azure.data.tables.tableAsyncClient.submitTransactionWithError
* <pre>
*
* tableAsyncClient.submitTransaction&
* .contextWrite&
* .doOnError&
* &
* int failedActionIndex = e.getFailedTransactionActionIndex&
* &
* &
* transactionActions.remove&
* &
* &
* .subscribe&
* System.out.print&
*
* tableTransactionResult.getTransactionActionResponses&
* System.out.printf&
* &
* </pre>
* <!-- end com.azure.data.tables.tableAsyncClient.submitTransactionWithError
*
* @param transactionActions A {@link List} of {@link TableTransactionAction actions} to perform on
* {@link TableEntity entities} in a table.
*
* @return A {@link List} of {@link TableTransactionActionResponse sub-responses} that correspond to each
* {@link TableTransactionResult action} in the transaction.
*
* @throws IllegalArgumentException If no {@link TableTransactionAction actions} have been added to the list.
* @throws TableServiceException If the request is rejected by the service.
* @throws TableTransactionFailedException If any {@link TableTransactionResult action} within the transaction
* fails. See the documentation for the client methods in {@link TableClient} to understand the conditions that
* may cause a given {@link TableTransactionAction action} to fail.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public TableTransactionResult submitTransaction(List<TableTransactionAction> transactionActions) {
// Delegates to the WithResponse variant with no client-side timeout and no custom context,
// returning only the deserialized transaction result.
return submitTransactionWithResponse(transactionActions, null, null).getValue();
}
/**
* Executes all {@link TableTransactionAction actions} within the list inside a transaction. When the call
* completes, either all {@link TableTransactionAction actions} in the transaction will succeed, or if a failure
* occurs, all {@link TableTransactionAction actions} in the transaction will be rolled back.
* {@link TableTransactionAction Actions} are executed sequentially. Each {@link TableTransactionAction action}
* must operate on a distinct row key. Attempting to pass multiple {@link TableTransactionAction actions} that
* share the same row key will cause an error.
*
* <p><strong>Code Samples</strong></p>
* <p>Submits a transaction that contains multiple {@link TableTransactionAction actions} to be applied to
* {@link TableEntity entities} on a table. Prints out details of the {@link Response HTTP response} for the
* operation, as well as each {@link TableTransactionAction action}'s corresponding {@link Response HTTP
* response}.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransactionWithResponse
* <pre>
* List<TableTransactionAction> myTransactionActions = new ArrayList<>&
*
* String myPartitionKey = "markers";
* String myFirstEntityRowKey = "m001";
* String mySecondEntityRowKey = "m002";
*
* TableEntity myFirstEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* myTransactionActions.add&
*
* System.out.printf&
* myFirstEntityRowKey&
*
* TableEntity mySecondEntity = new TableEntity&
* .addProperty&
* .addProperty&
*
* myTransactionActions.add&
*
* System.out.printf&
* mySecondEntityRowKey&
*
* Response<TableTransactionResult> response = tableClient.submitTransactionWithResponse&
* Duration.ofSeconds&
*
* System.out.printf&
* + " actions are:", response.getStatusCode&
*
* response.getValue&
* System.out.printf&
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransactionWithResponse
* <p>Shows how to handle a transaction with a failing {@link TableTransactionAction action} via the provided
* {@link TableTransactionFailedException exception}, which contains the index of the first failing action in the
* transaction.</p>
* <!-- src_embed com.azure.data.tables.tableClient.submitTransactionWithResponseWithError
* <pre>
* try &
* Response<TableTransactionResult> transactionResultResponse =
* tableClient.submitTransactionWithResponse&
* new Context&
*
* System.out.printf&
* + " submitted actions are:", transactionResultResponse.getStatusCode&
*
* transactionResultResponse.getValue&
* .forEach&
* System.out.printf&
* &
* &
* int failedActionIndex = e.getFailedTransactionActionIndex&
* &
* &
* myTransactionActions.remove&
* &
* &
* </pre>
* <!-- end com.azure.data.tables.tableClient.submitTransactionWithResponseWithError
*
* @param transactionActions A {@link List} of {@link TableTransactionAction transaction actions} to perform on
* {@link TableEntity entities} in a table.
* @param timeout An optional timeout value beyond which a {@link RuntimeException} will be raised.
* @param context Additional {@link Context} that is passed through the {@link HttpPipeline HTTP pipeline} during
* the service call.
*
* @return An {@link Response HTTP response} produced for the transaction itself. The response's value will contain
* a {@link List} of {@link TableTransactionActionResponse sub-responses} that correspond to each
* {@link TableTransactionAction action} in the transaction.
*
* @throws IllegalArgumentException If no {@link TableTransactionAction actions} have been added to the list.
* @throws TableServiceException If the request is rejected by the service.
* @throws TableTransactionFailedException If any {@link TableTransactionAction action} within the transaction
* fails. See the documentation for the client methods in {@link TableClient} to understand the conditions that
* may cause a given {@link TableTransactionAction action} to fail.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<TableTransactionResult> submitTransactionWithResponse(List<TableTransactionAction> transactionActions, Duration timeout, Context context) {
OptionalLong timeoutInMillis = TableUtils.setTimeout(timeout);
Context contextValue = TableUtils.setContext(context, true);
// Reject empty transactions up front. NOTE(review): a null list would NPE on this call before
// reaching the friendlier IllegalArgumentException — confirm callers never pass null.
if (transactionActions.isEmpty()) {
throw logger.logExceptionAsError(
new IllegalArgumentException("A transaction must contain at least one operation."));
}
// Translate each public transaction action into its internal transactional-batch equivalent.
final List<TransactionalBatchAction> operations = new ArrayList<>();
for (TableTransactionAction transactionAction : transactionActions) {
switch (transactionAction.getActionType()) {
case CREATE:
operations.add(new TransactionalBatchAction.CreateEntity(transactionAction.getEntity()));
break;
case UPSERT_MERGE:
operations.add(new TransactionalBatchAction.UpsertEntity(transactionAction.getEntity(),
TableEntityUpdateMode.MERGE));
break;
case UPSERT_REPLACE:
operations.add(new TransactionalBatchAction.UpsertEntity(transactionAction.getEntity(),
TableEntityUpdateMode.REPLACE));
break;
case UPDATE_MERGE:
operations.add(new TransactionalBatchAction.UpdateEntity(transactionAction.getEntity(),
TableEntityUpdateMode.MERGE, transactionAction.getIfUnchanged()));
break;
case UPDATE_REPLACE:
operations.add(new TransactionalBatchAction.UpdateEntity(transactionAction.getEntity(),
TableEntityUpdateMode.REPLACE, transactionAction.getIfUnchanged()));
break;
case DELETE:
operations.add(
new TransactionalBatchAction.DeleteEntity(transactionAction.getEntity(),
transactionAction.getIfUnchanged()));
break;
default:
// NOTE(review): unknown action types are silently dropped here — confirm intentional.
break;
}
}
Callable<Response<TableTransactionResult>> callable = () -> {
// Assemble the multipart batch body: prepare one HTTP sub-request per action, then fold
// each (request, action) pair into the request body via the mutable-reduction collector.
BiConsumer<TransactionalBatchRequestBody, RequestActionPair> accumulator = (body, pair) ->
body.addChangeOperation(new TransactionalBatchSubRequest(pair.getAction(), pair.getRequest()));
BiConsumer<TransactionalBatchRequestBody, TransactionalBatchRequestBody> combiner = (body1, body2) ->
body2.getContents().forEach(req -> body1.addChangeOperation((TransactionalBatchSubRequest) req));
TransactionalBatchRequestBody requestBody =
operations.stream()
.map(op -> new RequestActionPair(op.prepareRequest(transactionalBatchClient), op))
.collect(TransactionalBatchRequestBody::new, accumulator, combiner);
ResponseBase<TransactionalBatchSubmitBatchHeaders, TableTransactionActionResponse[]> response =
transactionalBatchImplementation
.submitTransactionalBatchWithRestResponse(requestBody, null, contextValue);
// parseResponse throws if any sub-response failed; otherwise it returns the per-action responses.
Response<List<TableTransactionActionResponse>> parsedResponse = parseResponse(requestBody, response);
return new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(),
new TableTransactionResult(transactionActions, parsedResponse.getValue()));
};
try {
return timeoutInMillis.isPresent()
? THREAD_POOL.submit(callable).get(timeoutInMillis.getAsLong(), TimeUnit.MILLISECONDS)
: callable.call();
} catch (Exception ex) {
// NOTE(review): this method maps failures via TableUtils.interpretException while the sibling
// methods above use mapThrowableToTableServiceException — confirm the difference is deliberate
// (transaction failures must surface as TableTransactionFailedException).
throw logger.logExceptionAsError((RuntimeException) TableUtils.interpretException(ex));
}
}
/**
 * Immutable pairing of a prepared batch HTTP sub-request with the transactional action that
 * produced it, used to thread both values through the stream that assembles the batch body.
 */
private static class RequestActionPair {
private final HttpRequest request;
private final TransactionalBatchAction action;
RequestActionPair(HttpRequest request, TransactionalBatchAction action) {
this.request = request;
this.action = action;
}
// The prepared HTTP sub-request for this action.
public HttpRequest getRequest() {
return request;
}
// The transactional action the request was built from.
public TransactionalBatchAction getAction() {
return action;
}
}
/**
 * Correlates the per-action sub-responses of a transactional batch with the sub-requests that
 * produced them, and raises a TableTransactionFailedException (wrapped in a RuntimeException)
 * if any sub-response reports a failure. Only the first failing sub-response is captured.
 */
private Response<List<TableTransactionActionResponse>> parseResponse(TransactionalBatchRequestBody requestBody,
ResponseBase<TransactionalBatchSubmitBatchHeaders, TableTransactionActionResponse[]> response) {
TableServiceError error = null;
String errorMessage = null;
TransactionalBatchChangeSet changes = null;
TransactionalBatchAction failedAction = null;
Integer failedIndex = null;
// The first element of the request body is the change set when one was built.
if (requestBody.getContents().get(0) instanceof TransactionalBatchChangeSet) {
changes = (TransactionalBatchChangeSet) requestBody.getContents().get(0);
}
for (int i = 0; i < response.getValue().length; i++) {
TableTransactionActionResponse subResponse = response.getValue()[i];
// Attach the originating HTTP request to the sub-response so callers can inspect it.
if (changes != null && changes.getContents().get(i) != null) {
ModelHelper.updateTableTransactionActionResponse(subResponse,
changes.getContents().get(i).getHttpRequest());
}
// Record only the first failure (status >= 400); later failures are ignored.
if (subResponse.getStatusCode() >= 400 && error == null && errorMessage == null) {
if (subResponse.getValue() instanceof TableServiceError) {
error = (TableServiceError) subResponse.getValue();
// The OData error message conventionally begins with "<index>:"; parse it to point
// the caller at the failing action.
if (changes != null && error.getOdataError() != null
&& error.getOdataError().getMessage() != null
&& error.getOdataError().getMessage().getValue() != null) {
String message = error.getOdataError().getMessage().getValue();
try {
failedIndex = Integer.parseInt(message.substring(0, message.indexOf(":")));
failedAction = changes.getContents().get(failedIndex).getOperation();
} catch (NumberFormatException e) {
// The message did not start with a parsable index; leave failedIndex and
// failedAction unset and fall back to the generic failure message below.
}
}
} else if (subResponse.getValue() instanceof String) {
errorMessage = "The service returned the following data for the failed operation: "
+ subResponse.getValue();
} else {
errorMessage =
"The service returned the following status code for the failed operation: "
+ subResponse.getStatusCode();
}
}
}
if (error != null || errorMessage != null) {
String message = "An action within the operation failed, the transaction has been rolled back.";
if (failedAction != null) {
message += " The failed operation was: " + failedAction;
} else if (errorMessage != null) {
message += " " + errorMessage;
}
// Wrapped in RuntimeException so the Callable plumbing can rethrow it; callers unwrap it
// via the exception-mapping utilities.
throw logger.logExceptionAsError(new RuntimeException(
new TableTransactionFailedException(message, null, toTableServiceError(error), failedIndex)));
} else {
return new SimpleResponse<>(response, Arrays.asList(response.getValue()));
}
}
} |
Asserting client needs to be added to playback client case too. | private TableClientBuilder configureTestClientBuilder(TableClientBuilder tableClientBuilder, String tableName) {
    // Applies the shared test configuration (verbose HTTP logging + table under test) and wires
    // up the HTTP client appropriate for the current test mode.
    tableClientBuilder
        .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
        .tableName(tableName);
    if (interceptorManager.isPlaybackMode()) {
        playbackClient = interceptorManager.getPlaybackClient();
        // FIX: wrap the playback client in the asserting client as well, so the sync/async call
        // assertions are enforced in playback mode exactly as they are in record/live mode.
        tableClientBuilder.httpClient(buildAssertingClient(playbackClient));
    } else {
        tableClientBuilder.httpClient(buildAssertingClient(DEFAULT_HTTP_CLIENT));
        if (!interceptorManager.isLiveMode()) {
            // Record mode: capture network traffic for later playback.
            recordPolicy = interceptorManager.getRecordPolicy();
            tableClientBuilder.addPolicy(recordPolicy);
        }
    }
    return tableClientBuilder;
} | tableClientBuilder.httpClient(playbackClient); | private TableClientBuilder configureTestClientBuilder(TableClientBuilder tableClientBuilder, String tableName) {
// Shared test configuration: verbose HTTP logging plus the table under test.
tableClientBuilder
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
playbackClient = interceptorManager.getPlaybackClient();
// The playback client is wrapped in the asserting client so sync/async call assertions are
// enforced in playback mode as well as in record/live mode.
tableClientBuilder.httpClient(buildAssertingClient(playbackClient));
} else {
tableClientBuilder.httpClient(buildAssertingClient(DEFAULT_HTTP_CLIENT));
if (!interceptorManager.isLiveMode()) {
// Record mode: capture network traffic for later playback.
recordPolicy = interceptorManager.getRecordPolicy();
tableClientBuilder.addPolicy(recordPolicy);
}
}
return tableClientBuilder;
} | class TableClientTestBase extends TestBase {
// HTTP client used for record/live runs; playback runs substitute the recorded client.
protected static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
// True when the run targets a Cosmos DB Tables endpoint rather than Azure Storage.
protected static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
// Record policy / playback client captured so subclasses and teardown can reference them.
protected HttpPipelinePolicy recordPolicy;
protected HttpClient playbackClient;
// Subclasses wrap the given client with sync/async call assertions appropriate to the client type.
protected abstract HttpClient buildAssertingClient(HttpClient httpClient);
// Builds a test TableClientBuilder authenticated via connection string.
protected TableClientBuilder getClientBuilder(String tableName, String connectionString) {
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.connectionString(connectionString);
return configureTestClientBuilder(tableClientBuilder, tableName);
}
// Builds a test TableClientBuilder authenticated via token credential, optionally with
// tenant discovery enabled.
protected TableClientBuilder getClientBuilder(String tableName, String endpoint, TokenCredential tokenCredential,
boolean enableTenantDiscovery) {
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.credential(tokenCredential)
.endpoint(endpoint);
if (enableTenantDiscovery) {
tableClientBuilder.enableTenantDiscovery();
}
return configureTestClientBuilder(tableClientBuilder, tableName);
}
@Test
public abstract void createTable();
@Test
public abstract void createTableWithResponse();
@Test
public abstract void createEntity();
@Test
public abstract void createEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void createEntityWithSingleQuotesInRowKey();
@Test
public abstract void createEntityWithResponse();
@Test
public abstract void createEntityWithAllSupportedDataTypes();
/*@Test
public abstract void createEntitySubclass();*/
@Test
public abstract void deleteTable();
@Test
public abstract void deleteNonExistingTable();
@Test
public abstract void deleteTableWithResponse();
@Test
public abstract void deleteNonExistingTableWithResponse();
@Test
public abstract void deleteEntity();
@Test
public abstract void deleteEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void deleteEntityWithSingleQuotesInRowKey();
@Test
public abstract void deleteNonExistingEntity();
@Test
public abstract void deleteEntityWithResponse();
@Test
public abstract void deleteNonExistingEntityWithResponse();
@Test
public abstract void deleteEntityWithResponseMatchETag();
@Test
public abstract void getEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void getEntityWithSingleQuotesInRowKey();
@Test
public abstract void getEntityWithResponse();
@Test
public abstract void getEntityWithResponseWithSelect();
/*@Test
public abstract void getEntityWithResponseSubclass();*/
@Test
public abstract void updateEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void updateEntityWithSingleQuotesInRowKey();
@Test
public abstract void updateEntityWithResponseReplace();
@Test
public abstract void updateEntityWithResponseMerge();
/*@Test
public abstract void updateEntityWithResponseSubclass();*/
@Test
public abstract void listEntities();
@Test
public abstract void listEntitiesWithSingleQuotesInPartitionKey();
@Test
public abstract void listEntitiesWithSingleQuotesInRowKey();
@Test
public abstract void listEntitiesWithFilter();
@Test
public abstract void listEntitiesWithSelect();
@Test
public abstract void listEntitiesWithTop();
/*@Test
public abstract void listEntitiesSubclass();*/
@Test
public abstract void submitTransaction();
@Test
public abstract void submitTransactionAllActions();
@Test
public abstract void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey();
@Test
public abstract void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey();
@Test
public abstract void submitTransactionWithFailingAction();
@Test
public abstract void submitTransactionWithSameRowKeys();
@Test
public abstract void submitTransactionWithDifferentPartitionKeys();
@Test
public abstract void generateSasTokenWithMinimumParameters();
@Test
public abstract void generateSasTokenWithAllParameters();
@Test
public abstract void canUseSasTokenToCreateValidTableClient();
@Test
public abstract void setAndListAccessPolicies();
@Test
public abstract void setAndListMultipleAccessPolicies();
} | class TableClientTestBase extends TestBase {
protected static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
protected static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
protected HttpPipelinePolicy recordPolicy;
protected HttpClient playbackClient;
protected abstract HttpClient buildAssertingClient(HttpClient httpClient);
protected TableClientBuilder getClientBuilder(String tableName, String connectionString) {
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.connectionString(connectionString);
return configureTestClientBuilder(tableClientBuilder, tableName);
}
protected TableClientBuilder getClientBuilder(String tableName, String endpoint, TokenCredential tokenCredential,
boolean enableTenantDiscovery) {
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.credential(tokenCredential)
.endpoint(endpoint);
if (enableTenantDiscovery) {
tableClientBuilder.enableTenantDiscovery();
}
return configureTestClientBuilder(tableClientBuilder, tableName);
}
@Test
public abstract void createTable();
@Test
public abstract void createTableWithResponse();
@Test
public abstract void createEntity();
@Test
public abstract void createEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void createEntityWithSingleQuotesInRowKey();
@Test
public abstract void createEntityWithResponse();
@Test
public abstract void createEntityWithAllSupportedDataTypes();
/*@Test
public abstract void createEntitySubclass();*/
@Test
public abstract void deleteTable();
@Test
public abstract void deleteNonExistingTable();
@Test
public abstract void deleteTableWithResponse();
@Test
public abstract void deleteNonExistingTableWithResponse();
@Test
public abstract void deleteEntity();
@Test
public abstract void deleteEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void deleteEntityWithSingleQuotesInRowKey();
@Test
public abstract void deleteNonExistingEntity();
@Test
public abstract void deleteEntityWithResponse();
@Test
public abstract void deleteNonExistingEntityWithResponse();
@Test
public abstract void deleteEntityWithResponseMatchETag();
@Test
public abstract void getEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void getEntityWithSingleQuotesInRowKey();
@Test
public abstract void getEntityWithResponse();
@Test
public abstract void getEntityWithResponseWithSelect();
/*@Test
public abstract void getEntityWithResponseSubclass();*/
@Test
public abstract void updateEntityWithSingleQuotesInPartitionKey();
@Test
public abstract void updateEntityWithSingleQuotesInRowKey();
@Test
public abstract void updateEntityWithResponseReplace();
@Test
public abstract void updateEntityWithResponseMerge();
/*@Test
public abstract void updateEntityWithResponseSubclass();*/
@Test
public abstract void listEntities();
@Test
public abstract void listEntitiesWithSingleQuotesInPartitionKey();
@Test
public abstract void listEntitiesWithSingleQuotesInRowKey();
@Test
public abstract void listEntitiesWithFilter();
@Test
public abstract void listEntitiesWithSelect();
@Test
public abstract void listEntitiesWithTop();
/*@Test
public abstract void listEntitiesSubclass();*/
@Test
public abstract void submitTransaction();
@Test
public abstract void submitTransactionAllActions();
@Test
public abstract void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey();
@Test
public abstract void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey();
@Test
public abstract void submitTransactionWithFailingAction();
@Test
public abstract void submitTransactionWithSameRowKeys();
@Test
public abstract void submitTransactionWithDifferentPartitionKeys();
@Test
public abstract void generateSasTokenWithMinimumParameters();
@Test
public abstract void generateSasTokenWithAllParameters();
@Test
public abstract void canUseSasTokenToCreateValidTableClient();
@Test
public abstract void setAndListAccessPolicies();
@Test
public abstract void setAndListMultipleAccessPolicies();
} |
Asserting client needs to be added to playback client case too. | private TableServiceClientBuilder configureTestClientBuilder(TableServiceClientBuilder tableServiceClientBuilder) {
    // Applies the shared test configuration (verbose HTTP logging) and wires up the HTTP client
    // appropriate for the current test mode.
    tableServiceClientBuilder
        .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
    if (interceptorManager.isPlaybackMode()) {
        playbackClient = interceptorManager.getPlaybackClient();
        // FIX: wrap the playback client in the asserting client as well, so the sync/async call
        // assertions are enforced in playback mode exactly as they are in record/live mode.
        tableServiceClientBuilder.httpClient(buildAssertingClient(playbackClient));
    } else {
        tableServiceClientBuilder.httpClient(buildAssertingClient(DEFAULT_HTTP_CLIENT));
        if (!interceptorManager.isLiveMode()) {
            // Record mode: capture network traffic for later playback.
            recordPolicy = interceptorManager.getRecordPolicy();
            tableServiceClientBuilder.addPolicy(recordPolicy);
        }
    }
    return tableServiceClientBuilder;
} | tableServiceClientBuilder.httpClient(playbackClient); | private TableServiceClientBuilder configureTestClientBuilder(TableServiceClientBuilder tableServiceClientBuilder) {
// Shared test configuration: verbose HTTP logging.
tableServiceClientBuilder
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS));
if (interceptorManager.isPlaybackMode()) {
playbackClient = interceptorManager.getPlaybackClient();
// The playback client is wrapped in the asserting client so sync/async call assertions are
// enforced in playback mode as well as in record/live mode.
tableServiceClientBuilder.httpClient(buildAssertingClient(playbackClient));
} else {
tableServiceClientBuilder.httpClient(buildAssertingClient(DEFAULT_HTTP_CLIENT));
if (!interceptorManager.isLiveMode()) {
// Record mode: capture network traffic for later playback.
recordPolicy = interceptorManager.getRecordPolicy();
tableServiceClientBuilder.addPolicy(recordPolicy);
}
}
return tableServiceClientBuilder;
} | class TableServiceClientTestBase extends TestBase {
// HTTP client used for record/live runs; playback runs substitute the recorded client.
protected static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
// Record policy / playback client captured so subclasses and teardown can reference them.
protected HttpPipelinePolicy recordPolicy;
protected HttpClient playbackClient;
// Subclasses wrap the given client with sync/async call assertions appropriate to the client type.
protected abstract HttpClient buildAssertingClient(HttpClient httpClient);
// Builds a test TableServiceClientBuilder authenticated via connection string.
protected TableServiceClientBuilder getClientBuilder(String connectionString) {
final TableServiceClientBuilder tableServiceClientBuilder = new TableServiceClientBuilder()
.connectionString(connectionString);
return configureTestClientBuilder(tableServiceClientBuilder);
}
// Builds a test TableServiceClientBuilder authenticated via token credential, optionally with
// tenant discovery enabled.
protected TableServiceClientBuilder getClientBuilder(String endpoint, TokenCredential tokenCredential,
boolean enableTenantDiscovery) {
final TableServiceClientBuilder tableServiceClientBuilder = new TableServiceClientBuilder()
.credential(tokenCredential)
.endpoint(endpoint);
if (enableTenantDiscovery) {
tableServiceClientBuilder.enableTenantDiscovery();
}
return configureTestClientBuilder(tableServiceClientBuilder);
}
@Test
public abstract void serviceCreateTable();
@Test
public abstract void serviceCreateTableWithResponse();
@Test
public abstract void serviceCreateTableFailsIfExists();
@Test
public abstract void serviceCreateTableIfNotExists();
@Test
public abstract void serviceCreateTableIfNotExistsSucceedsIfExists();
@Test
public abstract void serviceCreateTableIfNotExistsWithResponse();
@Test
public abstract void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists();
@Test
public abstract void serviceDeleteTable();
@Test
public abstract void serviceDeleteNonExistingTable();
@Test
public abstract void serviceDeleteTableWithResponse();
@Test
public abstract void serviceDeleteNonExistingTableWithResponse();
@Test
public abstract void serviceListTables();
@Test
public abstract void serviceListTablesWithFilter();
@Test
public abstract void serviceListTablesWithTop();
@Test
public abstract void serviceGetTableClient();
@Test
public abstract void generateAccountSasTokenWithMinimumParameters();
@Test
public abstract void generateAccountSasTokenWithAllParameters();
@Test
public abstract void canUseSasTokenToCreateValidTableClient();
@Test
public abstract void setGetProperties();
@Test
public abstract void getStatistics() throws URISyntaxException;
} | class TableServiceClientTestBase extends TestBase {
protected static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
protected HttpPipelinePolicy recordPolicy;
protected HttpClient playbackClient;
protected abstract HttpClient buildAssertingClient(HttpClient httpClient);
protected TableServiceClientBuilder getClientBuilder(String connectionString) {
final TableServiceClientBuilder tableServiceClientBuilder = new TableServiceClientBuilder()
.connectionString(connectionString);
return configureTestClientBuilder(tableServiceClientBuilder);
}
protected TableServiceClientBuilder getClientBuilder(String endpoint, TokenCredential tokenCredential,
boolean enableTenantDiscovery) {
final TableServiceClientBuilder tableServiceClientBuilder = new TableServiceClientBuilder()
.credential(tokenCredential)
.endpoint(endpoint);
if (enableTenantDiscovery) {
tableServiceClientBuilder.enableTenantDiscovery();
}
return configureTestClientBuilder(tableServiceClientBuilder);
}
@Test
public abstract void serviceCreateTable();
@Test
public abstract void serviceCreateTableWithResponse();
@Test
public abstract void serviceCreateTableFailsIfExists();
@Test
public abstract void serviceCreateTableIfNotExists();
@Test
public abstract void serviceCreateTableIfNotExistsSucceedsIfExists();
@Test
public abstract void serviceCreateTableIfNotExistsWithResponse();
@Test
public abstract void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists();
@Test
public abstract void serviceDeleteTable();
@Test
public abstract void serviceDeleteNonExistingTable();
@Test
public abstract void serviceDeleteTableWithResponse();
@Test
public abstract void serviceDeleteNonExistingTableWithResponse();
@Test
public abstract void serviceListTables();
@Test
public abstract void serviceListTablesWithFilter();
@Test
public abstract void serviceListTablesWithTop();
@Test
public abstract void serviceGetTableClient();
@Test
public abstract void generateAccountSasTokenWithMinimumParameters();
@Test
public abstract void generateAccountSasTokenWithAllParameters();
@Test
public abstract void canUseSasTokenToCreateValidTableClient();
@Test
public abstract void setGetProperties();
@Test
public abstract void getStatistics() throws URISyntaxException;
} |
We skipped paging and polling operations when adding sync stack support for Key Vault, as shown [here](https://github.com/Azure/azure-sdk-for-java/blob/a90cc9a4d40f9492e76d5a4589b321e94c0aa5de/sdk/keyvault/azure-security-keyvault-secrets/src/test/java/com/azure/security/keyvault/secrets/SecretClientTest.java#L61). Has that been taken care of @g2vinay, @samvaity? | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | .build(); | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | class TableServiceAsyncClientTest extends TableServiceClientTestBase {
private static final Duration TIMEOUT = Duration.ofSeconds(100);
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
private TableServiceAsyncClient serviceClient;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(TIMEOUT);
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@Override
protected void beforeTest() {
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildAsyncClient();
}
@Test
public void serviceCreateTable() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceAsyncClient tableServiceAsyncClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
tableName = testResourceNamer.randomName("tableName", 20);
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTable(tableName))
.expectErrorMatches(e -> e instanceof TableServiceException
&& ((TableServiceException) e).getResponse().getStatusCode() == 409)
.verify();
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName).block();
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables())
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.assertNext(table -> assertEquals(tableName, table.getName()))
.expectNextCount(0)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
serviceClient.createTable(tableName3).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
TableAsyncClient tableClient = serviceClient.getTableClient(tableName);
TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties))
.assertNext(response -> {
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
})
.expectComplete()
.verify();
sleepIfRunningAgainstService(30000);
StepVerifier.create(serviceClient.getProperties())
.assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties))
.expectComplete()
.verify();
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildAsyncClient();
StepVerifier.create(secondaryClient.getStatistics())
.assertNext(statistics -> {
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
})
.expectComplete()
.verify();
}
} | class TableServiceAsyncClientTest extends TableServiceClientTestBase {
private static final Duration TIMEOUT = Duration.ofSeconds(100);
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
private TableServiceAsyncClient serviceClient;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(TIMEOUT);
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@Override
protected void beforeTest() {
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildAsyncClient();
}
@Test
public void serviceCreateTable() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceAsyncClient tableServiceAsyncClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
tableName = testResourceNamer.randomName("tableName", 20);
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTable(tableName))
.expectErrorMatches(e -> e instanceof TableServiceException
&& ((TableServiceException) e).getResponse().getStatusCode() == 409)
.verify();
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName).block();
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables())
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.assertNext(table -> assertEquals(tableName, table.getName()))
.expectNextCount(0)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
serviceClient.createTable(tableName3).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
TableAsyncClient tableClient = serviceClient.getTableClient(tableName);
TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties))
.assertNext(response -> {
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
})
.expectComplete()
.verify();
sleepIfRunningAgainstService(30000);
StepVerifier.create(serviceClient.getProperties())
.assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties))
.expectComplete()
.verify();
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildAsyncClient();
StepVerifier.create(secondaryClient.getStatistics())
.assertNext(statistics -> {
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
})
.expectComplete()
.verify();
}
} |
Let's see if we need to skip paging and polling methods here like I mentioned on my other comment. | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertSync()
.build();
} | .build(); | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertSync()
.build();
} | class TableServiceClientTest extends TableServiceClientTestBase {
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
private TableServiceClient serviceClient;
@Override
protected void beforeTest() {
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildClient();
}
@Test
public void serviceCreateTable() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTable(tableName));
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceClient tableServiceClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
assertNotNull(tableServiceClient.createTable(tableName));
tableName = testResourceNamer.randomName("tableName", 20);
assertNotNull(tableServiceClient.createTable(tableName));
}
@Test
public void serviceCreateTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTableWithResponse(tableName, null, null).getValue());
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertThrows(TableServiceException.class, () -> serviceClient.createTable(tableName));
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTableIfNotExists(tableName));
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertNull(serviceClient.createTableIfNotExists(tableName));
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
final Response<TableClient> response = serviceClient.createTableIfNotExistsWithResponse(tableName, null, null);
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName);
final Response<TableClient> response = serviceClient.createTableIfNotExistsWithResponse(tableName, null, null);
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertDoesNotThrow(() -> serviceClient.deleteTable(tableName));
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
assertDoesNotThrow(() -> serviceClient.createTable(tableName));
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName);
assertEquals(expectedStatusCode, serviceClient.deleteTableWithResponse(tableName, null, null).getStatusCode());
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
assertEquals(expectedStatusCode, serviceClient.deleteTableWithResponse(tableName, null, null).getStatusCode());
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
Iterator<PagedResponse<TableItem>> iterator = serviceClient.listTables().iterableByPage().iterator();
assertTrue(iterator.hasNext());
assertTrue(2 <= iterator.next().getValue().size());
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
serviceClient.listTables(options, null, null)
.forEach(tableItem -> assertEquals(tableName, tableItem.getName()));
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
serviceClient.createTable(tableName3);
Iterator<PagedResponse<TableItem>> iterator =
serviceClient.listTables(options, null, null).iterableByPage().iterator();
assertTrue(iterator.hasNext());
assertEquals(2, iterator.next().getValue().size());
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
TableClient tableClient = serviceClient.getTableClient(tableName);
TableClientTest.getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
Response<Void> response = serviceClient.setPropertiesWithResponse(sentProperties, null, null);
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
sleepIfRunningAgainstService(20000);
TableServiceProperties retrievedProperties = serviceClient.getProperties();
assertPropertiesEquals(sentProperties, retrievedProperties);
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildClient();
TableServiceStatistics statistics = secondaryClient.getStatistics();
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
}
} | class TableServiceClientTest extends TableServiceClientTestBase {
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
private TableServiceClient serviceClient;
@Override
protected void beforeTest() {
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildClient();
}
@Test
public void serviceCreateTable() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTable(tableName));
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceClient tableServiceClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
assertNotNull(tableServiceClient.createTable(tableName));
tableName = testResourceNamer.randomName("tableName", 20);
assertNotNull(tableServiceClient.createTable(tableName));
}
@Test
public void serviceCreateTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTableWithResponse(tableName, null, null).getValue());
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertThrows(TableServiceException.class, () -> serviceClient.createTable(tableName));
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
assertNotNull(serviceClient.createTableIfNotExists(tableName));
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertNull(serviceClient.createTableIfNotExists(tableName));
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
final Response<TableClient> response = serviceClient.createTableIfNotExistsWithResponse(tableName, null, null);
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName);
final Response<TableClient> response = serviceClient.createTableIfNotExistsWithResponse(tableName, null, null);
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
assertDoesNotThrow(() -> serviceClient.deleteTable(tableName));
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
assertDoesNotThrow(() -> serviceClient.createTable(tableName));
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName);
assertEquals(expectedStatusCode, serviceClient.deleteTableWithResponse(tableName, null, null).getStatusCode());
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
assertEquals(expectedStatusCode, serviceClient.deleteTableWithResponse(tableName, null, null).getStatusCode());
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
Iterator<PagedResponse<TableItem>> iterator = serviceClient.listTables().iterableByPage().iterator();
assertTrue(iterator.hasNext());
assertTrue(2 <= iterator.next().getValue().size());
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
serviceClient.listTables(options, null, null)
.forEach(tableItem -> assertEquals(tableName, tableItem.getName()));
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName);
serviceClient.createTable(tableName2);
serviceClient.createTable(tableName3);
Iterator<PagedResponse<TableItem>> iterator =
serviceClient.listTables(options, null, null).iterableByPage().iterator();
assertTrue(iterator.hasNext());
assertEquals(2, iterator.next().getValue().size());
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
TableClient tableClient = serviceClient.getTableClient(tableName);
TableClientTest.getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
Response<Void> response = serviceClient.setPropertiesWithResponse(sentProperties, null, null);
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
sleepIfRunningAgainstService(20000);
TableServiceProperties retrievedProperties = serviceClient.getProperties();
assertPropertiesEquals(sentProperties, retrievedProperties);
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildClient();
TableServiceStatistics statistics = secondaryClient.getStatistics();
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
}
} |
Let's see if we need to skip paging and polling methods here like I mentioned on my other comment. | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertSync()
.build();
} | .build(); | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertSync()
.build();
} | class TableClientTest extends TableClientTestBase {
private TableClient tableClient;
protected void beforeTest() {
final String tableName = testResourceNamer.randomName("tableName", 20);
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
tableClient = getClientBuilder(tableName, connectionString).buildClient();
tableClient.createTable();
}
@Test
public void createTable() {
final String tableName2 = testResourceNamer.randomName("tableName", 20);
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
final TableClient tableClient2 = getClientBuilder(tableName2, connectionString).buildClient();
assertNotNull(tableClient2.createTable());
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void createTableWithMultipleTenants() {
Assumptions.assumeTrue(tableClient.getTableEndpoint().contains("core.windows.net")
&& tableClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
final String tableName2 = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableClient tableClient2 =
getClientBuilder(tableName2, Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
assertNotNull(tableClient2.createTable());
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
assertDoesNotThrow(() -> tableClient2.createEntity(tableEntity));
}
@Test
public void createTableWithResponse() {
final String tableName2 = testResourceNamer.randomName("tableName", 20);
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
final TableClient tableClient2 = getClientBuilder(tableName2, connectionString).buildClient();
final int expectedStatusCode = 204;
assertEquals(expectedStatusCode, tableClient2.createTableWithResponse(null, null).getStatusCode());
}
@Test
public void createEntity() {
createEntityImpl("partitionKey", "rowKey");
}
@Test
public void createEntityWithSingleQuotesInPartitionKey() {
createEntityImpl("partition'Key", "rowKey");
}
@Test
public void createEntityWithSingleQuotesInRowKey() {
createEntityImpl("partitionKey", "row'Key");
}
private void createEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
assertDoesNotThrow(() -> tableClient.createEntity(tableEntity));
}
@Test
public void createEntityWithResponse() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
assertEquals(expectedStatusCode, tableClient.createEntityWithResponse(entity, null, null).getStatusCode());
}
// Round-trips an entity carrying every supported EDM property type and verifies the
// runtime Java type each one deserializes back to.
@Test
public void createEntityWithAllSupportedDataTypes() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final boolean booleanValue = true;
final byte[] binaryValue = "Test value".getBytes();
final Date dateValue = new Date();
final OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();
final double doubleValue = 2.0d;
final UUID guidValue = UUID.randomUUID();
final int int32Value = 1337;
final long int64Value = 1337L;
final String stringValue = "This is table entity";
tableEntity.addProperty("BinaryTypeProperty", binaryValue);
tableEntity.addProperty("BooleanTypeProperty", booleanValue);
tableEntity.addProperty("DateTypeProperty", dateValue);
tableEntity.addProperty("OffsetDateTimeTypeProperty", offsetDateTimeValue);
tableEntity.addProperty("DoubleTypeProperty", doubleValue);
tableEntity.addProperty("GuidTypeProperty", guidValue);
tableEntity.addProperty("Int32TypeProperty", int32Value);
tableEntity.addProperty("Int64TypeProperty", int64Value);
tableEntity.addProperty("StringTypeProperty", stringValue);
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
final TableEntity entity = response.getValue();
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.get("BinaryTypeProperty") instanceof byte[]);
assertTrue(properties.get("BooleanTypeProperty") instanceof Boolean);
// Note: a java.util.Date property comes back as OffsetDateTime after the round trip.
assertTrue(properties.get("DateTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("OffsetDateTimeTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("DoubleTypeProperty") instanceof Double);
assertTrue(properties.get("GuidTypeProperty") instanceof UUID);
assertTrue(properties.get("Int32TypeProperty") instanceof Integer);
assertTrue(properties.get("Int64TypeProperty") instanceof Long);
assertTrue(properties.get("StringTypeProperty") instanceof String);
}
/*@Test
public void createEntitySubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
SampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);
tableEntity.setByteField(bytes);
tableEntity.setBooleanField(b);
tableEntity.setDateTimeField(dateTime);
tableEntity.setDoubleField(d);
tableEntity.setUuidField(uuid);
tableEntity.setIntField(i);
tableEntity.setLongField(l);
tableEntity.setStringField(s);
tableEntity.setEnumField(color);
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
TableEntity entity = response.getValue();
assertArrayEquals((byte[]) entity.getProperties().get("ByteField"), bytes);
assertEquals(entity.getProperties().get("BooleanField"), b);
assertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get("DateTimeField")));
assertEquals(entity.getProperties().get("DoubleField"), d);
assertEquals(0, uuid.compareTo((UUID) entity.getProperties().get("UuidField")));
assertEquals(entity.getProperties().get("IntField"), i);
assertEquals(entity.getProperties().get("LongField"), l);
assertEquals(entity.getProperties().get("StringField"), s);
assertEquals(entity.getProperties().get("EnumField"), color.name());
}*/
// Deleting an existing table should not throw.
@Test
public void deleteTable() {
assertDoesNotThrow(() -> tableClient.deleteTable());
}
// Deleting a table that was already deleted should be a no-op, not an error.
@Test
public void deleteNonExistingTable() {
tableClient.deleteTable();
assertDoesNotThrow(() -> tableClient.deleteTable());
}
// Deleting an existing table returns 204 No Content.
@Test
public void deleteTableWithResponse() {
final int expectedStatusCode = 204;
assertEquals(expectedStatusCode, tableClient.deleteTableWithResponse(null, null).getStatusCode());
}
// Deleting an already-deleted table surfaces 404 when the raw response is inspected.
@Test
public void deleteNonExistingTableWithResponse() {
final int expectedStatusCode = 404;
tableClient.deleteTableWithResponse(null, null);
assertEquals(expectedStatusCode, tableClient.deleteTableWithResponse(null, null).getStatusCode());
}
// Entity deletion should succeed for plain keys.
@Test
public void deleteEntity() {
deleteEntityImpl("partitionKey", "rowKey");
}
// Single quotes in the partition key require OData escaping; deletion must still succeed.
@Test
public void deleteEntityWithSingleQuotesInPartitionKey() {
deleteEntityImpl("partition'Key", "rowKey");
}
// Single quotes in the row key require OData escaping; deletion must still succeed.
@Test
public void deleteEntityWithSingleQuotesInRowKey() {
deleteEntityImpl("partitionKey", "row'Key");
}
// Shared body for the deleteEntity* tests: create an entity, confirm it exists and has
// an eTag, then delete it and assert no exception is thrown.
private void deleteEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
final String pk = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rk = testResourceNamer.randomName(rowKeyPrefix, 20);
tableClient.createEntity(new TableEntity(pk, rk));
final TableEntity persisted = tableClient.getEntity(pk, rk);
assertNotNull(persisted, "'createdEntity' should not be null.");
assertNotNull(persisted.getETag(), "'eTag' should not be null.");
assertDoesNotThrow(() -> tableClient.deleteEntity(pk, rk));
}
// Deleting an entity that was never created should be a no-op, not an error.
@Test
public void deleteNonExistingEntity() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
assertDoesNotThrow(() -> tableClient.deleteEntity(partitionKeyValue, rowKeyValue));
}
// Deleting an existing entity (without eTag matching) returns 204 No Content.
@Test
public void deleteEntityWithResponse() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
tableClient.createEntity(tableEntity);
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
// 'false' disables eTag matching; deletion is unconditional.
assertEquals(expectedStatusCode,
tableClient.deleteEntityWithResponse(createdEntity, false, null, null).getStatusCode());
}
// Deleting a non-existent entity surfaces 404 when the raw response is inspected.
@Test
public void deleteNonExistingEntityWithResponse() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 404;
assertEquals(expectedStatusCode,
tableClient.deleteEntityWithResponse(entity, false, null, null).getStatusCode());
}
// Deleting with eTag matching enabled succeeds (204) when the entity's current eTag matches.
@Test
public void deleteEntityWithResponseMatchETag() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
tableClient.createEntity(tableEntity);
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
// 'true' enables conditional deletion against the freshly-fetched eTag.
assertEquals(expectedStatusCode,
tableClient.deleteEntityWithResponse(createdEntity, true, null, null).getStatusCode());
}
// Single quotes in the partition key require OData escaping; retrieval must still succeed.
@Test
public void getEntityWithSingleQuotesInPartitionKey() {
getEntityWithResponseImpl(tableClient, testResourceNamer, "partition'Key", "rowKey");
}
// Single quotes in the row key require OData escaping; retrieval must still succeed.
@Test
public void getEntityWithSingleQuotesInRowKey() {
getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "row'Key");
}
// Retrieval should succeed for plain keys.
@Test
public void getEntityWithResponse() {
getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
// Shared body for the getEntity* tests: creates an entity with randomized keys, fetches it
// back, and verifies status 200 plus the round-tripped keys and service-populated metadata
// (timestamp, eTag, property map). Static so sibling test classes can reuse it.
static void getEntityWithResponseImpl(TableClient tableClient, TestResourceNamer testResourceNamer,
String partitionKeyPrefix, String rowKeyPrefix) {
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 200;
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
final TableEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertNotNull(entity.getProperties());
}
// Verifies that a $select projection returns only the requested property: system
// properties (PartitionKey, RowKey, Timestamp) are absent from the projected entity.
@Test
public void getEntityWithResponseWithSelect() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
tableEntity.addProperty("Test", "Value");
final int expectedStatusCode = 200;
tableClient.createEntity(tableEntity);
List<String> propertyList = new ArrayList<>();
propertyList.add("Test");
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList, null, null);
final TableEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertNull(entity.getPartitionKey());
assertNull(entity.getRowKey());
assertNull(entity.getTimestamp());
assertNotNull(entity.getETag());
// Fixed argument order: JUnit's assertEquals takes (expected, actual).
assertEquals("Value", entity.getProperties().get("Test"));
}
// Merge updates must succeed when the partition key contains a single quote (OData escaping).
@Test
public void updateEntityWithSingleQuotesInPartitionKey() {
updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, testResourceNamer.randomName("partition'Key", 20),
testResourceNamer.randomName("rowKey", 20));
}
// Merge updates must succeed when the row key contains a single quote (OData escaping).
@Test
public void updateEntityWithSingleQuotesInRowKey() {
updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, testResourceNamer.randomName("partitionKey", 20),
testResourceNamer.randomName("row'Key", 20));
}
/*@Test
public void getEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
final Map<String, Object> props = new HashMap<>();
props.put("ByteField", bytes);
props.put("BooleanField", b);
props.put("DateTimeField", dateTime);
props.put("DoubleField", d);
props.put("UuidField", uuid);
props.put("IntField", i);
props.put("LongField", l);
props.put("StringField", s);
props.put("EnumField", color);
TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
tableEntity.setProperties(props);
int expectedStatusCode = 200;
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class, null, null);
SampleEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertArrayEquals(bytes, entity.getByteField());
assertEquals(b, entity.getBooleanField());
assertTrue(dateTime.isEqual(entity.getDateTimeField()));
assertEquals(d, entity.getDoubleField());
assertEquals(0, uuid.compareTo(entity.getUuidField()));
assertEquals(i, entity.getIntField());
assertEquals(l, entity.getLongField());
assertEquals(s, entity.getStringField());
assertEquals(color, entity.getEnumField());
}*/
// REPLACE mode: the stored entity is overwritten; properties not in the update are dropped.
@Test
public void updateEntityWithResponseReplace() {
updateEntityWithResponseImpl(TableEntityUpdateMode.REPLACE, "partitionKey", "rowKey");
}
// MERGE mode: properties not in the update are retained on the stored entity.
@Test
public void updateEntityWithResponseMerge() {
updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, "partitionKey", "rowKey");
}
/**
 * Shared body for the update-mode tests.
 *
 * In the case of {@link TableEntityUpdateMode#MERGE}, the old property is expected to
 * still be present on the stored entity after the update.
 * In the case of {@link TableEntityUpdateMode#REPLACE}, the old property is expected to
 * have been removed, since the stored entity is replaced wholesale.
 */
void updateEntityWithResponseImpl(TableEntityUpdateMode mode, String partitionKeyPrefix, String rowKeyPrefix) {
final boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final int expectedStatusCode = 204;
final String oldPropertyKey = "propertyA";
final String newPropertyKey = "propertyB";
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)
.addProperty(oldPropertyKey, "valueA");
tableClient.createEntity(tableEntity);
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
// The update payload drops the old property and adds a new one; whether the old one
// survives server-side depends on the update mode under test.
createdEntity.getProperties().remove(oldPropertyKey);
createdEntity.addProperty(newPropertyKey, "valueB");
assertEquals(expectedStatusCode,
tableClient.updateEntityWithResponse(createdEntity, mode, true, null, null).getStatusCode());
TableEntity entity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey(newPropertyKey));
assertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));
}
/*@Test
public void updateEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("APartitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("ARowKey", 20);
int expectedStatusCode = 204;
SingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);
tableEntity.setSubclassProperty("InitialValue");
tableClient.createEntity(tableEntity);
tableEntity.setSubclassProperty("UpdatedValue");
assertEquals(expectedStatusCode,
tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true, null, null)
.getStatusCode());
TableEntity entity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey("SubclassProperty"));
assertEquals("UpdatedValue", properties.get("SubclassProperty"));
}*/
// Listing should return entities with plain keys.
@Test
public void listEntities() {
listEntitiesImpl("partitionKey", "rowKey");
}
// Listing must handle partition keys containing single quotes (OData escaping).
@Test
public void listEntitiesWithSingleQuotesInPartitionKey() {
listEntitiesImpl("partition'Key", "rowKey");
}
// Listing must handle row keys containing single quotes (OData escaping).
@Test
public void listEntitiesWithSingleQuotesInRowKey() {
listEntitiesImpl("partitionKey", "row'Key");
}
// Shared body for the listEntities* tests: insert two entities under one partition and
// verify the first page of an unfiltered listing contains both.
private void listEntitiesImpl(String partitionKeyPrefix, String rowKeyPrefix) {
final String pk = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String firstRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
final String secondRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
tableClient.createEntity(new TableEntity(pk, firstRowKey));
tableClient.createEntity(new TableEntity(pk, secondRowKey));
final Iterator<PagedResponse<TableEntity>> pages =
tableClient.listEntities().iterableByPage().iterator();
assertTrue(pages.hasNext());
assertEquals(2, pages.next().getValue().size());
}
// A RowKey equality filter should return only the matching entity out of the two created.
@Test
public void listEntitiesWithFilter() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
ListEntitiesOptions options = new ListEntitiesOptions().setFilter("RowKey eq '" + rowKeyValue + "'");
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2));
// Every returned entity must match the filtered row key (the second entity is excluded).
tableClient.listEntities(options, null, null).forEach(tableEntity -> {
assertEquals(partitionKeyValue, tableEntity.getPartitionKey());
assertEquals(rowKeyValue, tableEntity.getRowKey());
});
}
// A $select projection on a listing should return only the chosen property; system
// properties and unselected properties are absent from the results.
@Test
public void listEntitiesWithSelect() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue)
.addProperty("propertyC", "valueC")
.addProperty("propertyD", "valueD");
List<String> propertyList = new ArrayList<>();
propertyList.add("propertyC");
ListEntitiesOptions options = new ListEntitiesOptions()
.setSelect(propertyList);
tableClient.createEntity(entity);
Iterator<PagedResponse<TableEntity>> iterator =
tableClient.listEntities(options, null, null).iterableByPage().iterator();
assertTrue(iterator.hasNext());
TableEntity retrievedEntity = iterator.next().getValue().get(0);
assertNull(retrievedEntity.getPartitionKey());
assertNull(retrievedEntity.getRowKey());
assertEquals("valueC", retrievedEntity.getProperties().get("propertyC"));
assertNull(retrievedEntity.getProperties().get("propertyD"));
}
// setTop(2) caps the page size: with three entities stored, the first page holds exactly two.
@Test
public void listEntitiesWithTop() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
final String rowKeyValue3 = testResourceNamer.randomName("rowKey", 20);
ListEntitiesOptions options = new ListEntitiesOptions().setTop(2);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue3));
Iterator<PagedResponse<TableEntity>> iterator =
tableClient.listEntities(options, null, null).iterableByPage().iterator();
assertTrue(iterator.hasNext());
assertEquals(2, iterator.next().getValue().size());
}
/*@Test
public void listEntitiesSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2));
Iterator<PagedResponse<TableEntity>> iterator =
tableClient.listEntities(SampleEntity.class).iterableByPage().iterator();
assertTrue(iterator.hasNext());
List<TableEntity> retrievedEntities = iterator.next().getValue();
TableEntity retrievedEntity = retrievedEntities.get(0);
TableEntity retrievedEntity2 = retrievedEntities.get(1);
assertEquals(partitionKeyValue, retrievedEntity.getPartitionKey());
assertEquals(rowKeyValue, retrievedEntity.getRowKey());
assertEquals(partitionKeyValue, retrievedEntity2.getPartitionKey());
assertEquals(rowKeyValue2, retrievedEntity2.getRowKey());
}*/
// Submits a two-action CREATE batch and verifies: the batch responds 202, each sub-operation
// responds 204, and the first created entity is retrievable afterwards.
@Test
public void submitTransaction() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));
final Response<TableTransactionResult> result =
tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
assertNotNull(result);
assertEquals(expectedBatchStatusCode, result.getStatusCode());
assertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(0).getStatusCode());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(1).getStatusCode());
// Spot-check that the batch actually persisted the first entity.
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
final TableEntity entity = response.getValue();
assertNotNull(entity);
assertEquals(partitionKeyValue, entity.getPartitionKey());
assertEquals(rowKeyValue, entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertNotNull(entity.getProperties());
}
// All transaction action types should succeed for plain keys.
@Test
public void submitTransactionAllActions() {
submitTransactionAllActionsImpl("partitionKey", "rowKey");
}
// All transaction action types must handle partition keys containing single quotes.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey() {
submitTransactionAllActionsImpl("partition'Key", "rowKey");
}
// All transaction action types must handle row keys containing single quotes.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey() {
submitTransactionAllActionsImpl("partitionKey", "row'Key");
}
// Shared body exercising every TableTransactionActionType in one batch: CREATE, both
// UPSERT modes (insert and merge paths), UPSERT_REPLACE, UPDATE_MERGE, UPDATE_REPLACE,
// and DELETE. Expects 202 for the batch and 204 for each sub-operation.
private void submitTransactionAllActionsImpl(String partitionKeyPrefix, String rowKeyPrefix) {
String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
String rowKeyValueCreate = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertInsert = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueDelete = testResourceNamer.randomName(rowKeyPrefix, 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
// Pre-create the entities that the merge/replace/delete actions will operate on.
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete));
TableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);
toUpsertMerge.addProperty("Test", "MergedValue");
TableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);
toUpsertReplace.addProperty("Test", "ReplacedValue");
TableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);
toUpdateMerge.addProperty("Test", "MergedValue");
TableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);
toUpdateReplace.addProperty("Test", "MergedValue");
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
new TableEntity(partitionKeyValue, rowKeyValueCreate)));
// UPSERT_MERGE on a non-existent entity exercises the insert path.
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,
new TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
new TableEntity(partitionKeyValue, rowKeyValueDelete)));
final Response<TableTransactionResult> response =
tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
assertNotNull(response);
assertEquals(expectedBatchStatusCode, response.getStatusCode());
TableTransactionResult result = response.getValue();
assertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());
for (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {
assertEquals(expectedOperationStatusCode, subResponse.getStatusCode());
}
}
// A batch containing a DELETE for a non-existent entity must fail as a whole, and the
// thrown exception must identify the failing operation and its keys.
@Test
public void submitTransactionWithFailingAction() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
new TableEntity(partitionKeyValue, rowKeyValue)));
// This DELETE targets an entity that was never created, so the batch should fail.
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
new TableEntity(partitionKeyValue, rowKeyValue2)));
try {
tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
} catch (TableTransactionFailedException e) {
assertTrue(e.getMessage().contains("An action within the operation failed"));
assertTrue(e.getMessage().contains("The failed operation was"));
assertTrue(e.getMessage().contains("DeleteEntity"));
assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue));
assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue2));
return;
}
// Reaching this point means no exception was thrown, which is a test failure.
fail();
}
// Two CREATE actions with the same partition and row key must fail the batch. Storage
// surfaces a TableTransactionFailedException; Cosmos instead rejects with a 400
// TableServiceException containing "InvalidDuplicateRow".
@Test
public void submitTransactionWithSameRowKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
try {
tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
} catch (TableTransactionFailedException e) {
assertTrue(e.getMessage().contains("An action within the operation failed"));
assertTrue(e.getMessage().contains("The failed operation was"));
assertTrue(e.getMessage().contains("CreateEntity"));
assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue));
assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue));
return;
} catch (TableServiceException e) {
// Only the Cosmos endpoint is expected to take this path.
assertTrue(IS_COSMOS_TEST);
assertEquals(400, e.getResponse().getStatusCode());
assertTrue(e.getMessage().contains("InvalidDuplicateRow"));
return;
}
// Reaching this point means no exception was thrown, which is a test failure.
fail();
}
// A transactional batch must target a single partition; mixing two partition keys should
// fail with a TableTransactionFailedException identifying the offending action.
@Test
public void submitTransactionWithDifferentPartitionKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String partitionKeyValue2 = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));
try {
tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
} catch (TableTransactionFailedException e) {
// Cosmos reports the first action as failed while Storage reports the second; only
// the expected keys differ, so the remaining assertions are shared between services.
final String expectedPartitionKey = IS_COSMOS_TEST ? partitionKeyValue : partitionKeyValue2;
final String expectedRowKey = IS_COSMOS_TEST ? rowKeyValue : rowKeyValue2;
assertTrue(e.getMessage().contains("An action within the operation failed"));
assertTrue(e.getMessage().contains("The failed operation was"));
assertTrue(e.getMessage().contains("CreateEntity"));
assertTrue(e.getMessage().contains("partitionKey='" + expectedPartitionKey));
assertTrue(e.getMessage().contains("rowKey='" + expectedRowKey));
return;
}
// Reaching this point means no exception was thrown, which is a test failure.
fail();
}
// Generates a SAS token with only the required values set and verifies the query-string
// prefix (version, expiry, table name, permissions, protocol) before the signature.
@Test
public void generateSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("r");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
// Only the prefix is checked; the trailing signature varies with the account key.
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&tn=" + tableClient.getTableName()
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
// Generates a SAS token with every optional value set (start time, IP range, partition/row
// key range) and verifies the full query-string prefix before the signature.
@Test
public void generateSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("raud");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final String startPartitionKey = "startPartitionKey";
final String startRowKey = "startRowKey";
final String endPartitionKey = "endPartitionKey";
final String endRowKey = "endRowKey";
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange)
.setStartPartitionKey(startPartitionKey)
.setStartRowKey(startRowKey)
.setEndPartitionKey(endPartitionKey)
.setEndRowKey(endRowKey);
final String sas = tableClient.generateSas(sasSignatureValues);
// Only the prefix is checked; the trailing signature varies with the account key.
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&tn=" + tableClient.getTableName()
+ "&sp=raud"
+ "&spk=startPartitionKey"
+ "&srk=startRowKey"
+ "&epk=endPartitionKey"
+ "&erk=endRowKey"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
// End-to-end check that a generated SAS token authorizes a fresh TableClient to create an
// entity (204). Currently @Disabled; skipped on Cosmos endpoints when enabled.
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Skipping Cosmos test.");
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("a");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(tableClient.getTableEndpoint())
.sasToken(sas)
.tableName(tableClient.getTableName());
// Wire the builder into the test-recording infrastructure: playback client in playback
// mode; otherwise the default client, plus a record policy outside live mode and retries.
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableClient newTableClient = tableClientBuilder.buildClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
assertEquals(expectedStatusCode, newTableClient.createEntityWithResponse(entity, null, null).getStatusCode());
}
// Round-trips a single signed identifier: sets an access policy on the table and
// verifies the same policy comes back from getAccessPolicies().
@Test
public void setAndListAccessPolicies() {
    Assumptions.assumeFalse(IS_COSMOS_TEST,
        "Setting and listing access policies is not supported on Cosmos endpoints.");
    final OffsetDateTime policyStart = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final OffsetDateTime policyExpiry = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final String policyPermissions = "r";
    final String policyId = "testPolicy";
    final TableSignedIdentifier identifier = new TableSignedIdentifier(policyId)
        .setAccessPolicy(new TableAccessPolicy()
            .setStartsOn(policyStart)
            .setExpiresOn(policyExpiry)
            .setPermissions(policyPermissions));
    final int expectedStatusCode = 204; // 204 No Content on a successful policy update
    assertEquals(expectedStatusCode,
        tableClient.setAccessPoliciesWithResponse(Collections.singletonList(identifier), null, null)
            .getStatusCode());
    final TableAccessPolicies retrieved = tableClient.getAccessPolicies();
    assertNotNull(retrieved);
    assertNotNull(retrieved.getIdentifiers());
    final TableSignedIdentifier retrievedIdentifier = retrieved.getIdentifiers().get(0);
    assertNotNull(retrievedIdentifier);
    final TableAccessPolicy retrievedPolicy = retrievedIdentifier.getAccessPolicy();
    assertNotNull(retrievedPolicy);
    assertEquals(policyStart, retrievedPolicy.getStartsOn());
    assertEquals(policyExpiry, retrievedPolicy.getExpiresOn());
    assertEquals(policyPermissions, retrievedPolicy.getPermissions());
    assertEquals(policyId, retrievedIdentifier.getId());
}
// Sets two signed identifiers that share one access policy and verifies both come
// back from getAccessPolicies() in the order they were set.
@Test
public void setAndListMultipleAccessPolicies() {
    Assumptions.assumeFalse(IS_COSMOS_TEST,
        "Setting and listing access policies is not supported on Cosmos endpoints.");
    final OffsetDateTime policyStart = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final OffsetDateTime policyExpiry = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final String policyPermissions = "r";
    final TableAccessPolicy sharedPolicy = new TableAccessPolicy()
        .setStartsOn(policyStart)
        .setExpiresOn(policyExpiry)
        .setPermissions(policyPermissions);
    final String firstId = "testPolicy1";
    final String secondId = "testPolicy2";
    final List<TableSignedIdentifier> identifiers = new ArrayList<>();
    identifiers.add(new TableSignedIdentifier(firstId).setAccessPolicy(sharedPolicy));
    identifiers.add(new TableSignedIdentifier(secondId).setAccessPolicy(sharedPolicy));
    final int expectedStatusCode = 204; // 204 No Content on a successful policy update
    assertEquals(expectedStatusCode,
        tableClient.setAccessPoliciesWithResponse(identifiers, null, null).getStatusCode());
    final TableAccessPolicies retrieved = tableClient.getAccessPolicies();
    assertNotNull(retrieved);
    assertNotNull(retrieved.getIdentifiers());
    assertEquals(2, retrieved.getIdentifiers().size());
    assertEquals(firstId, retrieved.getIdentifiers().get(0).getId());
    assertEquals(secondId, retrieved.getIdentifiers().get(1).getId());
    for (TableSignedIdentifier identifier : retrieved.getIdentifiers()) {
        assertNotNull(identifier);
        final TableAccessPolicy policy = identifier.getAccessPolicy();
        assertNotNull(policy);
        assertEquals(policyStart, policy.getStartsOn());
        assertEquals(policyExpiry, policy.getExpiresOn());
        assertEquals(policyPermissions, policy.getPermissions());
    }
}
} | class TableClientTest extends TableClientTestBase {
private TableClient tableClient;
/**
 * Creates a fresh, uniquely-named table before each test so individual tests do not
 * interfere with each other's data.
 */
@Override // lifecycle hook inherited from the test base class; annotate to catch signature drift
protected void beforeTest() {
    final String tableName = testResourceNamer.randomName("tableName", 20);
    final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
    tableClient = getClientBuilder(tableName, connectionString).buildClient();
    tableClient.createTable();
}
// Creating a brand-new, uniquely-named table should return a non-null result.
@Test
public void createTable() {
    final String newTableName = testResourceNamer.randomName("tableName", 20);
    final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
    final TableClient freshClient = getClientBuilder(newTableName, connectionString).buildClient();
    assertNotNull(freshClient.createTable());
}
/**
 * Tests that a table and entity can be created while having a different tenant ID than the one that will be
 * provided in the authentication challenge.
 */
@Test
public void createTableWithMultipleTenants() {
    // Only meaningful against Azure Storage endpoints on service version 2020-12-06.
    Assumptions.assumeTrue(tableClient.getTableEndpoint().contains("core.windows.net")
        && tableClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
    final String tableName2 = testResourceNamer.randomName("tableName", 20);
    // A random tenant ID forces the credential to resolve the real tenant from the
    // authentication challenge; "*" additionally allows any tenant to be acquired.
    final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
        .clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
        .clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
        .tenantId(testResourceNamer.randomUuid())
        .additionallyAllowedTenants("*")
        .build();
    // NOTE(review): the endpoint default literal below appears truncated in this view
    // ("https:" with no remainder) and the builder call presumably also passes
    // 'credential' — verify against the original source file.
    final TableClient tableClient2 =
        getClientBuilder(tableName2, Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
            "https:
    assertNotNull(tableClient2.createTable());
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
    assertDoesNotThrow(() -> tableClient2.createEntity(tableEntity));
}
// createTableWithResponse should report 204 No Content for a brand-new table.
@Test
public void createTableWithResponse() {
    final String newTableName = testResourceNamer.randomName("tableName", 20);
    final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
    final TableClient freshClient = getClientBuilder(newTableName, connectionString).buildClient();
    final int expectedStatusCode = 204;
    assertEquals(expectedStatusCode, freshClient.createTableWithResponse(null, null).getStatusCode());
}
// Entity creation with plain keys.
@Test
public void createEntity() {
    createEntityImpl("partitionKey", "rowKey");
}
// Keys containing single quotes must still round-trip.
// NOTE(review): correct OData quote escaping is implied by the test name — behavior
// lives in TableClient, not visible here.
@Test
public void createEntityWithSingleQuotesInPartitionKey() {
    createEntityImpl("partition'Key", "rowKey");
}
@Test
public void createEntityWithSingleQuotesInRowKey() {
    createEntityImpl("partitionKey", "row'Key");
}
// Shared helper: creating an entity with randomized keys must not throw.
private void createEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    final TableEntity entity = new TableEntity(partitionKey, rowKey);
    assertDoesNotThrow(() -> tableClient.createEntity(entity));
}
// createEntityWithResponse should return 204 No Content on success.
@Test
public void createEntityWithResponse() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);
    final int expectedStatusCode = 204;
    assertEquals(expectedStatusCode,
        tableClient.createEntityWithResponse(new TableEntity(partitionKey, rowKey), null, null).getStatusCode());
}
// Writes an entity containing every property type the Tables service supports and
// verifies each round-trips as the expected Java type on retrieval.
@Test
public void createEntityWithAllSupportedDataTypes() {
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
    final boolean booleanValue = true;
    final byte[] binaryValue = "Test value".getBytes();
    final Date dateValue = new Date();
    final OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();
    final double doubleValue = 2.0d;
    final UUID guidValue = UUID.randomUUID();
    final int int32Value = 1337;
    final long int64Value = 1337L;
    final String stringValue = "This is table entity";
    tableEntity.addProperty("BinaryTypeProperty", binaryValue);
    tableEntity.addProperty("BooleanTypeProperty", booleanValue);
    tableEntity.addProperty("DateTypeProperty", dateValue);
    tableEntity.addProperty("OffsetDateTimeTypeProperty", offsetDateTimeValue);
    tableEntity.addProperty("DoubleTypeProperty", doubleValue);
    tableEntity.addProperty("GuidTypeProperty", guidValue);
    tableEntity.addProperty("Int32TypeProperty", int32Value);
    tableEntity.addProperty("Int64TypeProperty", int64Value);
    tableEntity.addProperty("StringTypeProperty", stringValue);
    tableClient.createEntity(tableEntity);
    final Response<TableEntity> response =
        tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
    final TableEntity entity = response.getValue();
    final Map<String, Object> properties = entity.getProperties();
    assertTrue(properties.get("BinaryTypeProperty") instanceof byte[]);
    assertTrue(properties.get("BooleanTypeProperty") instanceof Boolean);
    // Note: the java.util.Date written above comes back as an OffsetDateTime,
    // as asserted here.
    assertTrue(properties.get("DateTypeProperty") instanceof OffsetDateTime);
    assertTrue(properties.get("OffsetDateTimeTypeProperty") instanceof OffsetDateTime);
    assertTrue(properties.get("DoubleTypeProperty") instanceof Double);
    assertTrue(properties.get("GuidTypeProperty") instanceof UUID);
    assertTrue(properties.get("Int32TypeProperty") instanceof Integer);
    assertTrue(properties.get("Int64TypeProperty") instanceof Long);
    assertTrue(properties.get("StringTypeProperty") instanceof String);
}
/*@Test
public void createEntitySubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
SampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);
tableEntity.setByteField(bytes);
tableEntity.setBooleanField(b);
tableEntity.setDateTimeField(dateTime);
tableEntity.setDoubleField(d);
tableEntity.setUuidField(uuid);
tableEntity.setIntField(i);
tableEntity.setLongField(l);
tableEntity.setStringField(s);
tableEntity.setEnumField(color);
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
TableEntity entity = response.getValue();
assertArrayEquals((byte[]) entity.getProperties().get("ByteField"), bytes);
assertEquals(entity.getProperties().get("BooleanField"), b);
assertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get("DateTimeField")));
assertEquals(entity.getProperties().get("DoubleField"), d);
assertEquals(0, uuid.compareTo((UUID) entity.getProperties().get("UuidField")));
assertEquals(entity.getProperties().get("IntField"), i);
assertEquals(entity.getProperties().get("LongField"), l);
assertEquals(entity.getProperties().get("StringField"), s);
assertEquals(entity.getProperties().get("EnumField"), color.name());
}*/
// Deleting an existing table succeeds without throwing.
@Test
public void deleteTable() {
    assertDoesNotThrow(() -> tableClient.deleteTable());
}
// Deleting an already-deleted table must also not throw (the underlying 404 is
// visible in deleteNonExistingTableWithResponse below).
@Test
public void deleteNonExistingTable() {
    tableClient.deleteTable();
    assertDoesNotThrow(() -> tableClient.deleteTable());
}
// Successful deletion reports 204 No Content.
@Test
public void deleteTableWithResponse() {
    final int expectedStatusCode = 204;
    assertEquals(expectedStatusCode, tableClient.deleteTableWithResponse(null, null).getStatusCode());
}
// The WithResponse variant surfaces the raw 404 for a missing table.
@Test
public void deleteNonExistingTableWithResponse() {
    final int expectedStatusCode = 404;
    tableClient.deleteTableWithResponse(null, null);
    assertEquals(expectedStatusCode, tableClient.deleteTableWithResponse(null, null).getStatusCode());
}
// deleteEntity with plain keys.
@Test
public void deleteEntity() {
    deleteEntityImpl("partitionKey", "rowKey");
}
// deleteEntity with a single quote in the partition key.
@Test
public void deleteEntityWithSingleQuotesInPartitionKey() {
    deleteEntityImpl("partition'Key", "rowKey");
}
// deleteEntity with a single quote in the row key.
@Test
public void deleteEntityWithSingleQuotesInRowKey() {
    deleteEntityImpl("partitionKey", "row'Key");
}
// Shared helper: create an entity, confirm it exists (non-null with an eTag),
// then delete it without error.
private void deleteEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    tableClient.createEntity(new TableEntity(partitionKey, rowKey));
    final TableEntity created = tableClient.getEntity(partitionKey, rowKey);
    assertNotNull(created, "'createdEntity' should not be null.");
    assertNotNull(created.getETag(), "'eTag' should not be null.");
    assertDoesNotThrow(() -> tableClient.deleteEntity(partitionKey, rowKey));
}
// Deleting an entity that never existed must not throw.
@Test
public void deleteNonExistingEntity() {
    final String missingPartitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String missingRowKey = testResourceNamer.randomName("rowKey", 20);
    assertDoesNotThrow(() -> tableClient.deleteEntity(missingPartitionKey, missingRowKey));
}
// Deleting an existing entity (ifUnchanged=false, no eTag matching) returns 204.
@Test
public void deleteEntityWithResponse() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);
    final int expectedStatusCode = 204;
    tableClient.createEntity(new TableEntity(partitionKey, rowKey));
    final TableEntity created = tableClient.getEntity(partitionKey, rowKey);
    assertNotNull(created, "'createdEntity' should not be null.");
    assertNotNull(created.getETag(), "'eTag' should not be null.");
    assertEquals(expectedStatusCode,
        tableClient.deleteEntityWithResponse(created, false, null, null).getStatusCode());
}
// The WithResponse variant surfaces the raw 404 when the entity does not exist.
@Test
public void deleteNonExistingEntityWithResponse() {
    final TableEntity missing = new TableEntity(
        testResourceNamer.randomName("partitionKey", 20),
        testResourceNamer.randomName("rowKey", 20));
    final int expectedStatusCode = 404;
    assertEquals(expectedStatusCode,
        tableClient.deleteEntityWithResponse(missing, false, null, null).getStatusCode());
}
// With ifUnchanged=true the fetched eTag acts as an If-Match condition; deletion
// succeeds (204) because the entity was not modified after retrieval.
@Test
public void deleteEntityWithResponseMatchETag() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);
    final int expectedStatusCode = 204;
    tableClient.createEntity(new TableEntity(partitionKey, rowKey));
    final TableEntity created = tableClient.getEntity(partitionKey, rowKey);
    assertNotNull(created, "'createdEntity' should not be null.");
    assertNotNull(created.getETag(), "'eTag' should not be null.");
    assertEquals(expectedStatusCode,
        tableClient.deleteEntityWithResponse(created, true, null, null).getStatusCode());
}
// getEntity round-trip with a single quote in the partition key.
@Test
public void getEntityWithSingleQuotesInPartitionKey() {
    getEntityWithResponseImpl(tableClient, testResourceNamer, "partition'Key", "rowKey");
}
// getEntity round-trip with a single quote in the row key.
@Test
public void getEntityWithSingleQuotesInRowKey() {
    getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "row'Key");
}
// Plain getEntityWithResponse round-trip.
@Test
public void getEntityWithResponse() {
    getEntityWithResponseImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
// Shared static helper (package-visible for reuse by sibling test classes): creates
// an entity and verifies the 200 response from getEntityWithResponse round-trips
// its keys and carries timestamp, eTag, and a properties map.
static void getEntityWithResponseImpl(TableClient tableClient, TestResourceNamer testResourceNamer,
    String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    final TableEntity created = new TableEntity(partitionKey, rowKey);
    final int expectedStatusCode = 200;
    tableClient.createEntity(created);
    final Response<TableEntity> response =
        tableClient.getEntityWithResponse(partitionKey, rowKey, null, null, null);
    assertEquals(expectedStatusCode, response.getStatusCode());
    final TableEntity retrieved = response.getValue();
    assertNotNull(retrieved);
    assertEquals(created.getPartitionKey(), retrieved.getPartitionKey());
    assertEquals(created.getRowKey(), retrieved.getRowKey());
    assertNotNull(retrieved.getTimestamp());
    assertNotNull(retrieved.getETag());
    assertNotNull(retrieved.getProperties());
}
// Retrieving an entity with a $select projection returns only the requested
// property; keys and timestamp not selected come back null (the eTag remains).
@Test
public void getEntityWithResponseWithSelect() {
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
    tableEntity.addProperty("Test", "Value");
    final int expectedStatusCode = 200;
    tableClient.createEntity(tableEntity);
    List<String> propertyList = new ArrayList<>();
    propertyList.add("Test");
    final Response<TableEntity> response =
        tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList, null, null);
    final TableEntity entity = response.getValue();
    assertEquals(expectedStatusCode, response.getStatusCode());
    assertNotNull(entity);
    assertNull(entity.getPartitionKey());
    assertNull(entity.getRowKey());
    assertNull(entity.getTimestamp());
    assertNotNull(entity.getETag());
    // Fixed: JUnit convention is assertEquals(expected, actual) — the arguments
    // were reversed, which produces misleading failure messages.
    assertEquals("Value", entity.getProperties().get("Test"));
}
// MERGE update with a single quote in the partition key.
// NOTE(review): these wrappers pass already-randomized 20-char names as *prefixes*,
// so updateEntityWithResponseImpl randomizes them again — confirm this double
// randomization is intentional.
@Test
public void updateEntityWithSingleQuotesInPartitionKey() {
    updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, testResourceNamer.randomName("partition'Key", 20),
        testResourceNamer.randomName("rowKey", 20));
}
// MERGE update with a single quote in the row key.
@Test
public void updateEntityWithSingleQuotesInRowKey() {
    updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, testResourceNamer.randomName("partitionKey", 20),
        testResourceNamer.randomName("row'Key", 20));
}
/*@Test
public void getEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
final Map<String, Object> props = new HashMap<>();
props.put("ByteField", bytes);
props.put("BooleanField", b);
props.put("DateTimeField", dateTime);
props.put("DoubleField", d);
props.put("UuidField", uuid);
props.put("IntField", i);
props.put("LongField", l);
props.put("StringField", s);
props.put("EnumField", color);
TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
tableEntity.setProperties(props);
int expectedStatusCode = 200;
tableClient.createEntity(tableEntity);
final Response<TableEntity> response =
tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class, null, null);
SampleEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertArrayEquals(bytes, entity.getByteField());
assertEquals(b, entity.getBooleanField());
assertTrue(dateTime.isEqual(entity.getDateTimeField()));
assertEquals(d, entity.getDoubleField());
assertEquals(0, uuid.compareTo(entity.getUuidField()));
assertEquals(i, entity.getIntField());
assertEquals(l, entity.getLongField());
assertEquals(s, entity.getStringField());
assertEquals(color, entity.getEnumField());
}*/
// REPLACE should drop properties omitted from the update payload.
@Test
public void updateEntityWithResponseReplace() {
    updateEntityWithResponseImpl(TableEntityUpdateMode.REPLACE, "partitionKey", "rowKey");
}
// MERGE should keep properties omitted from the update payload.
@Test
public void updateEntityWithResponseMerge() {
    updateEntityWithResponseImpl(TableEntityUpdateMode.MERGE, "partitionKey", "rowKey");
}
/**
 * In the case of {@link TableEntityUpdateMode#MERGE}, we expect the old property to survive
 * alongside the newly added one. In the case of {@link TableEntityUpdateMode#REPLACE}, only
 * the new property remains after the update.
 */
void updateEntityWithResponseImpl(TableEntityUpdateMode mode, String partitionKeyPrefix, String rowKeyPrefix) {
    // MERGE keeps properties absent from the update payload; REPLACE drops them.
    final boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;
    final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
    final int expectedStatusCode = 204;
    final String oldPropertyKey = "propertyA";
    final String newPropertyKey = "propertyB";
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)
        .addProperty(oldPropertyKey, "valueA");
    tableClient.createEntity(tableEntity);
    final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
    assertNotNull(createdEntity, "'createdEntity' should not be null.");
    assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
    // Remove the old property and add a new one, then push the update with
    // ifUnchanged=true so the fetched eTag is used as an If-Match condition.
    createdEntity.getProperties().remove(oldPropertyKey);
    createdEntity.addProperty(newPropertyKey, "valueB");
    assertEquals(expectedStatusCode,
        tableClient.updateEntityWithResponse(createdEntity, mode, true, null, null).getStatusCode());
    TableEntity entity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
    final Map<String, Object> properties = entity.getProperties();
    assertTrue(properties.containsKey(newPropertyKey));
    assertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));
}
/*@Test
public void updateEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("APartitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("ARowKey", 20);
int expectedStatusCode = 204;
SingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);
tableEntity.setSubclassProperty("InitialValue");
tableClient.createEntity(tableEntity);
tableEntity.setSubclassProperty("UpdatedValue");
assertEquals(expectedStatusCode,
tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true, null, null)
.getStatusCode()));
TableEntity entity = tableClient.getEntity(partitionKeyValue, rowKeyValue);
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey("SubclassProperty"));
assertEquals("UpdatedValue", properties.get("SubclassProperty"));
}*/
// listEntities with plain keys.
@Test
public void listEntities() {
    listEntitiesImpl("partitionKey", "rowKey");
}
// listEntities with a single quote in the partition key.
@Test
public void listEntitiesWithSingleQuotesInPartitionKey() {
    listEntitiesImpl("partition'Key", "rowKey");
}
// listEntities with a single quote in the row key.
@Test
public void listEntitiesWithSingleQuotesInRowKey() {
    listEntitiesImpl("partitionKey", "row'Key");
}
// Shared helper: two entities under one partition should come back in one page.
private void listEntitiesImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String firstRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    final String secondRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    tableClient.createEntity(new TableEntity(partitionKey, firstRowKey));
    tableClient.createEntity(new TableEntity(partitionKey, secondRowKey));
    final Iterator<PagedResponse<TableEntity>> pages =
        tableClient.listEntities().iterableByPage().iterator();
    assertTrue(pages.hasNext());
    assertEquals(2, pages.next().getValue().size());
}
// A RowKey equality filter should return only the entity with that exact key.
@Test
public void listEntitiesWithFilter() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String matchingRowKey = testResourceNamer.randomName("rowKey", 20);
    final String otherRowKey = testResourceNamer.randomName("rowKey", 20);
    final ListEntitiesOptions options =
        new ListEntitiesOptions().setFilter("RowKey eq '" + matchingRowKey + "'");
    tableClient.createEntity(new TableEntity(partitionKey, matchingRowKey));
    tableClient.createEntity(new TableEntity(partitionKey, otherRowKey));
    tableClient.listEntities(options, null, null).forEach(entity -> {
        assertEquals(partitionKey, entity.getPartitionKey());
        assertEquals(matchingRowKey, entity.getRowKey());
    });
}
// A $select projection on listEntities returns only the requested property;
// unselected keys and properties come back null.
@Test
public void listEntitiesWithSelect() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);
    final TableEntity entity = new TableEntity(partitionKey, rowKey)
        .addProperty("propertyC", "valueC")
        .addProperty("propertyD", "valueD");
    final List<String> selected = new ArrayList<>();
    selected.add("propertyC");
    final ListEntitiesOptions options = new ListEntitiesOptions().setSelect(selected);
    tableClient.createEntity(entity);
    final Iterator<PagedResponse<TableEntity>> pages =
        tableClient.listEntities(options, null, null).iterableByPage().iterator();
    assertTrue(pages.hasNext());
    final TableEntity projected = pages.next().getValue().get(0);
    assertNull(projected.getPartitionKey());
    assertNull(projected.getRowKey());
    assertEquals("valueC", projected.getProperties().get("propertyC"));
    assertNull(projected.getProperties().get("propertyD"));
}
// setTop(2) caps the page size: three entities exist but the first page holds two.
@Test
public void listEntitiesWithTop() {
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String firstRowKey = testResourceNamer.randomName("rowKey", 20);
    final String secondRowKey = testResourceNamer.randomName("rowKey", 20);
    final String thirdRowKey = testResourceNamer.randomName("rowKey", 20);
    final ListEntitiesOptions options = new ListEntitiesOptions().setTop(2);
    tableClient.createEntity(new TableEntity(partitionKey, firstRowKey));
    tableClient.createEntity(new TableEntity(partitionKey, secondRowKey));
    tableClient.createEntity(new TableEntity(partitionKey, thirdRowKey));
    final Iterator<PagedResponse<TableEntity>> pages =
        tableClient.listEntities(options, null, null).iterableByPage().iterator();
    assertTrue(pages.hasNext());
    assertEquals(2, pages.next().getValue().size());
}
/*@Test
public void listEntitiesSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue));
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2));
Iterator<PagedResponse<TableEntity>> iterator =
tableClient.listEntities(SampleEntity.class).iterableByPage().iterator();
assertTrue(iterator.hasNext());
List<TableEntity> retrievedEntities = iterator.next().getValue();
TableEntity retrievedEntity = retrievedEntities.get(0);
TableEntity retrievedEntity2 = retrievedEntities.get(1);
assertEquals(partitionKeyValue, retrievedEntity.getPartitionKey());
assertEquals(rowKeyValue, retrievedEntity.getRowKey());
assertEquals(partitionKeyValue, retrievedEntity2.getPartitionKey());
assertEquals(rowKeyValue2, retrievedEntity2.getRowKey());
}*/
// Submits a two-CREATE transaction and verifies the batch response (202 Accepted)
// wraps one 204 sub-response per action, then confirms the first entity exists.
@Test
public void submitTransaction() {
    String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
    int expectedBatchStatusCode = 202;
    int expectedOperationStatusCode = 204;
    List<TableTransactionAction> transactionalBatch = new ArrayList<>();
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));
    final Response<TableTransactionResult> result =
        tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
    assertNotNull(result);
    assertEquals(expectedBatchStatusCode, result.getStatusCode());
    // One sub-response per submitted action, each reporting 204 No Content.
    assertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());
    assertEquals(expectedOperationStatusCode,
        result.getValue().getTransactionActionResponses().get(0).getStatusCode());
    assertEquals(expectedOperationStatusCode,
        result.getValue().getTransactionActionResponses().get(1).getStatusCode());
    // The transaction actually persisted the first entity.
    final Response<TableEntity> response =
        tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, null, null);
    final TableEntity entity = response.getValue();
    assertNotNull(entity);
    assertEquals(partitionKeyValue, entity.getPartitionKey());
    assertEquals(rowKeyValue, entity.getRowKey());
    assertNotNull(entity.getTimestamp());
    assertNotNull(entity.getETag());
    assertNotNull(entity.getProperties());
}
// Exercises every transaction action type with plain keys.
@Test
public void submitTransactionAllActions() {
    submitTransactionAllActionsImpl("partitionKey", "rowKey");
}
// Same, with a single quote in the partition key.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey() {
    submitTransactionAllActionsImpl("partition'Key", "rowKey");
}
// Same, with a single quote in the row key.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey() {
    submitTransactionAllActionsImpl("partitionKey", "row'Key");
}
// Shared helper covering every TableTransactionActionType in one transaction:
// CREATE, UPSERT_MERGE (as insert and as merge), UPSERT_REPLACE, UPDATE_MERGE,
// UPDATE_REPLACE, and DELETE. Expects 202 for the batch and 204 per action.
private void submitTransactionAllActionsImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
    String rowKeyValueCreate = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueUpsertInsert = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueUpsertMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueUpsertReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueUpdateMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueUpdateReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
    String rowKeyValueDelete = testResourceNamer.randomName(rowKeyPrefix, 20);
    int expectedBatchStatusCode = 202;
    int expectedOperationStatusCode = 204;
    // Pre-create the entities targeted by the merge/replace/update/delete actions;
    // rowKeyValueCreate and rowKeyValueUpsertInsert are deliberately left absent.
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge));
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace));
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge));
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace));
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete));
    // Payload entities carrying a "Test" property for the merge/replace actions.
    TableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);
    toUpsertMerge.addProperty("Test", "MergedValue");
    TableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);
    toUpsertReplace.addProperty("Test", "ReplacedValue");
    TableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);
    toUpdateMerge.addProperty("Test", "MergedValue");
    TableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);
    toUpdateReplace.addProperty("Test", "MergedValue");
    List<TableTransactionAction> transactionalBatch = new ArrayList<>();
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
        new TableEntity(partitionKeyValue, rowKeyValueCreate)));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,
        new TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
        new TableEntity(partitionKeyValue, rowKeyValueDelete)));
    final Response<TableTransactionResult> response =
        tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
    assertNotNull(response);
    assertEquals(expectedBatchStatusCode, response.getStatusCode());
    TableTransactionResult result = response.getValue();
    // Every action must have its own 204 sub-response.
    assertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());
    for (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {
        assertEquals(expectedOperationStatusCode, subResponse.getStatusCode());
    }
}
// A transaction containing a DELETE for a non-existent entity must fail atomically;
// the thrown TableTransactionFailedException should pinpoint the failing action.
@Test
public void submitTransactionWithFailingAction() {
    String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
    List<TableTransactionAction> transactionalBatch = new ArrayList<>();
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
        new TableEntity(partitionKeyValue, rowKeyValue)));
    // This DELETE targets an entity that was never created, forcing the failure.
    transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
        new TableEntity(partitionKeyValue, rowKeyValue2)));
    try {
        tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
    } catch (TableTransactionFailedException e) {
        // The message must identify the failing DeleteEntity action and its keys.
        assertTrue(e.getMessage().contains("An action within the operation failed"));
        assertTrue(e.getMessage().contains("The failed operation was"));
        assertTrue(e.getMessage().contains("DeleteEntity"));
        assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue));
        assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue2));
        return;
    }
    // Reaching here means no exception was thrown — the transaction wrongly succeeded.
    fail();
}
@Test
public void submitTransactionWithSameRowKeys() {
    String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    String rowKeyValue = testResourceNamer.randomName("rowKey", 20);

    // Two CREATE actions targeting the exact same (partitionKey, rowKey) pair must be rejected.
    List<TableTransactionAction> transactionalBatch = new ArrayList<>();
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));

    try {
        tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
    } catch (TableTransactionFailedException e) {
        // Storage endpoints surface the duplicate through the transaction-failure exception.
        assertTrue(e.getMessage().contains("An action within the operation failed"));
        assertTrue(e.getMessage().contains("The failed operation was"));
        assertTrue(e.getMessage().contains("CreateEntity"));
        assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue));
        assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue));

        return;
    } catch (TableServiceException e) {
        // Cosmos endpoints reject the batch outright with a 400 "InvalidDuplicateRow".
        assertTrue(IS_COSMOS_TEST);
        assertEquals(400, e.getResponse().getStatusCode());
        assertTrue(e.getMessage().contains("InvalidDuplicateRow"));

        return;
    }

    // Fix: a bare fail() gives no diagnostic; state what was expected.
    fail("Expected submitTransactionWithResponse() to throw for duplicate row keys.");
}
@Test
public void submitTransactionWithDifferentPartitionKeys() {
    String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    String partitionKeyValue2 = testResourceNamer.randomName("partitionKey", 20);
    String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);

    // All actions in one transaction must share a partition key, so this batch must fail.
    List<TableTransactionAction> transactionalBatch = new ArrayList<>();
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
    transactionalBatch.add(new TableTransactionAction(
        TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));

    try {
        tableClient.submitTransactionWithResponse(transactionalBatch, null, null);
    } catch (TableTransactionFailedException e) {
        // Fix: these three assertions were duplicated verbatim in both branches; hoist them.
        assertTrue(e.getMessage().contains("An action within the operation failed"));
        assertTrue(e.getMessage().contains("The failed operation was"));
        assertTrue(e.getMessage().contains("CreateEntity"));

        // Cosmos reports the first action as the failing one; Storage reports the second
        // (the action whose partition key differs from the batch's).
        if (IS_COSMOS_TEST) {
            assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue));
            assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue));
        } else {
            assertTrue(e.getMessage().contains("partitionKey='" + partitionKeyValue2));
            assertTrue(e.getMessage().contains("rowKey='" + rowKeyValue2));
        }

        return;
    }

    // Fix: a bare fail() gives no diagnostic; state what was expected.
    fail("Expected submitTransactionWithResponse() to throw TableTransactionFailedException.");
}
@Test
public void generateSasTokenWithMinimumParameters() {
    // Only the mandatory expiry and permissions are supplied, plus protocol and service version.
    final OffsetDateTime expiresOn = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final TableSasPermission readOnly = TableSasPermission.parse("r");

    final TableSasSignatureValues sasSignatureValues = new TableSasSignatureValues(expiresOn, readOnly)
        .setProtocol(TableSasProtocol.HTTPS_ONLY)
        .setVersion(TableServiceVersion.V2019_02_02.getVersion());

    final String sas = tableClient.generateSas(sasSignatureValues);

    // Everything except the signature itself is deterministic, so validate the token's prefix.
    final String expectedPrefix = "sv=2019-02-02"
        + "&se=2021-12-12T00%3A00%3A00Z"
        + "&tn=" + tableClient.getTableName()
        + "&sp=r"
        + "&spr=https"
        + "&sig=";

    assertTrue(sas.startsWith(expectedPrefix));
}
@Test
public void generateSasTokenWithAllParameters() {
    // Exercise every optional knob on TableSasSignatureValues at once.
    final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);

    final TableSasSignatureValues sasSignatureValues =
        new TableSasSignatureValues(expiryTime, TableSasPermission.parse("raud"))
            .setProtocol(TableSasProtocol.HTTPS_HTTP)
            .setVersion(TableServiceVersion.V2019_02_02.getVersion())
            .setStartTime(startTime)
            .setSasIpRange(TableSasIpRange.parse("a-b"))
            .setStartPartitionKey("startPartitionKey")
            .setStartRowKey("startRowKey")
            .setEndPartitionKey("endPartitionKey")
            .setEndRowKey("endRowKey");

    final String sas = tableClient.generateSas(sasSignatureValues);

    // Everything except the signature itself is deterministic, so validate the token's prefix.
    final String expectedPrefix = "sv=2019-02-02"
        + "&st=2015-01-01T00%3A00%3A00Z"
        + "&se=2021-12-12T00%3A00%3A00Z"
        + "&tn=" + tableClient.getTableName()
        + "&sp=raud"
        + "&spk=startPartitionKey"
        + "&srk=startRowKey"
        + "&epk=endPartitionKey"
        + "&erk=endRowKey"
        + "&sip=a-b"
        + "&spr=https%2Chttp"
        + "&sig=";

    assertTrue(sas.startsWith(expectedPrefix));
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
// Access policies / SAS on tables are not supported against Cosmos endpoints.
Assumptions.assumeFalse(IS_COSMOS_TEST, "Skipping Cosmos test.");
// Generate a SAS token with "add" permission only; the new client must be able to create entities.
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("a");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
// Build a brand-new client authenticated solely with the SAS token (no key/credential).
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(tableClient.getTableEndpoint())
.sasToken(sas)
.tableName(tableClient.getTableName());
// Wire up the HTTP pipeline according to the current test mode: playback uses the recorded
// client; record mode additionally attaches the record policy; a retry policy smooths flakiness.
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableClient newTableClient = tableClientBuilder.buildClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
// 204 No Content confirms the SAS-authenticated client could add the entity.
final int expectedStatusCode = 204;
assertEquals(expectedStatusCode, newTableClient.createEntityWithResponse(entity, null, null).getStatusCode());
}
@Test
public void setAndListAccessPolicies() {
    Assumptions.assumeFalse(IS_COSMOS_TEST,
        "Setting and listing access policies is not supported on Cosmos endpoints.");

    final OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final String permissions = "r";
    final String id = "testPolicy";

    // Store a single read-only access policy under the identifier "testPolicy".
    final TableSignedIdentifier tableSignedIdentifier = new TableSignedIdentifier(id)
        .setAccessPolicy(new TableAccessPolicy()
            .setStartsOn(startTime)
            .setExpiresOn(expiryTime)
            .setPermissions(permissions));

    assertEquals(204,
        tableClient.setAccessPoliciesWithResponse(Collections.singletonList(tableSignedIdentifier), null, null)
            .getStatusCode());

    // Read the policies back and verify the stored identifier round-tripped intact.
    final TableAccessPolicies tableAccessPolicies = tableClient.getAccessPolicies();

    assertNotNull(tableAccessPolicies);
    assertNotNull(tableAccessPolicies.getIdentifiers());

    final TableSignedIdentifier signedIdentifier = tableAccessPolicies.getIdentifiers().get(0);

    assertNotNull(signedIdentifier);

    final TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();

    assertNotNull(accessPolicy);
    assertEquals(startTime, accessPolicy.getStartsOn());
    assertEquals(expiryTime, accessPolicy.getExpiresOn());
    assertEquals(permissions, accessPolicy.getPermissions());
    assertEquals(id, signedIdentifier.getId());
}
@Test
public void setAndListMultipleAccessPolicies() {
    Assumptions.assumeFalse(IS_COSMOS_TEST,
        "Setting and listing access policies is not supported on Cosmos endpoints.");

    final OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
    final String permissions = "r";
    final String id1 = "testPolicy1";
    final String id2 = "testPolicy2";

    // Two identifiers share one read-only access policy.
    final TableAccessPolicy tableAccessPolicy = new TableAccessPolicy()
        .setStartsOn(startTime)
        .setExpiresOn(expiryTime)
        .setPermissions(permissions);

    final List<TableSignedIdentifier> tableSignedIdentifiers = new ArrayList<>();
    tableSignedIdentifiers.add(new TableSignedIdentifier(id1).setAccessPolicy(tableAccessPolicy));
    tableSignedIdentifiers.add(new TableSignedIdentifier(id2).setAccessPolicy(tableAccessPolicy));

    assertEquals(204,
        tableClient.setAccessPoliciesWithResponse(tableSignedIdentifiers, null, null).getStatusCode());

    // Both identifiers must round-trip, in order, each still carrying the shared policy.
    final TableAccessPolicies tableAccessPolicies = tableClient.getAccessPolicies();

    assertNotNull(tableAccessPolicies);
    assertNotNull(tableAccessPolicies.getIdentifiers());
    assertEquals(2, tableAccessPolicies.getIdentifiers().size());
    assertEquals(id1, tableAccessPolicies.getIdentifiers().get(0).getId());
    assertEquals(id2, tableAccessPolicies.getIdentifiers().get(1).getId());

    for (TableSignedIdentifier signedIdentifier : tableAccessPolicies.getIdentifiers()) {
        assertNotNull(signedIdentifier);

        final TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();

        assertNotNull(accessPolicy);
        assertEquals(startTime, accessPolicy.getStartsOn());
        assertEquals(expiryTime, accessPolicy.getExpiresOn());
        assertEquals(permissions, accessPolicy.getPermissions());
    }
}
} |
Let's see if we need to skip paging and polling methods here like I mentioned on my other comment. | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | .build(); | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | class TableAsyncClientTest extends TableClientTestBase {
// Upper bound applied to StepVerifier verifications and blocking test-setup calls in this class.
private static final Duration TIMEOUT = Duration.ofSeconds(100);
// Async client under test; recreated against a fresh table in beforeTest().
private TableAsyncClient tableClient;
@BeforeAll
static void beforeAll() {
// Apply the shared timeout to every StepVerifier.verify() in this class.
StepVerifier.setDefaultTimeout(TIMEOUT);
}
@AfterAll
static void afterAll() {
// Restore StepVerifier's default verification timeout so other test classes are unaffected.
StepVerifier.resetDefaultTimeout();
}
protected void beforeTest() {
    // Every test runs against a freshly created table with a randomized name.
    tableClient = getClientBuilder(testResourceNamer.randomName("tableName", 20),
        TestUtils.getConnectionString(interceptorManager.isPlaybackMode())).buildAsyncClient();
    tableClient.createTable().block(TIMEOUT);
}
@Test
public void createTable() {
    // A second client pointing at a brand-new table name should be able to create that table.
    final TableAsyncClient tableClient2 = getClientBuilder(testResourceNamer.randomName("tableName", 20),
        TestUtils.getConnectionString(interceptorManager.isPlaybackMode())).buildAsyncClient();

    StepVerifier.create(tableClient2.createTable())
        .assertNext(Assertions::assertNotNull)
        .expectComplete()
        .verify();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void createTableWithMultipleTenants() {
// Only runs against Azure Storage endpoints on the 2020-12-06 service version.
Assumptions.assumeTrue(tableClient.getTableEndpoint().contains("core.windows.net")
&& tableClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
final String tableName2 = testResourceNamer.randomName("tableName", 20);
// Credential deliberately carries a random tenant ID; "*" allows the challenge tenant to be used.
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
// NOTE(review): the endpoint default literal below appears truncated ("https:) in this copy of
// the file — confirm against the repository before relying on it.
final TableAsyncClient tableClient2 =
getClientBuilder(tableName2, Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
StepVerifier.create(tableClient2.createTable())
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
// Entity creation must also succeed through the cross-tenant credential.
StepVerifier.create(tableClient2.createEntity(tableEntity))
.expectComplete()
.verify();
}
@Test
public void createTableWithResponse() {
    final TableAsyncClient tableClient2 = getClientBuilder(testResourceNamer.randomName("tableName", 20),
        TestUtils.getConnectionString(interceptorManager.isPlaybackMode())).buildAsyncClient();

    // Creating a new table reports 204 No Content.
    StepVerifier.create(tableClient2.createTableWithResponse())
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void createEntity() {
// Baseline case: plain alphanumeric partition and row keys.
createEntityImpl("partitionKey", "rowKey");
}
@Test
public void createEntityWithSingleQuotesInPartitionKey() {
// Single quotes must be escaped correctly in the partition key portion of the request URL.
createEntityImpl("partition'Key", "rowKey");
}
@Test
public void createEntityWithSingleQuotesInRowKey() {
// Single quotes must be escaped correctly in the row key portion of the request URL.
createEntityImpl("partitionKey", "row'Key");
}
private void createEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    // Build an entity from randomized keys derived from the given prefixes and create it.
    final TableEntity tableEntity = new TableEntity(
        testResourceNamer.randomName(partitionKeyPrefix, 20),
        testResourceNamer.randomName(rowKeyPrefix, 20));

    StepVerifier.create(tableClient.createEntity(tableEntity))
        .expectComplete()
        .verify();
}
@Test
public void createEntityWithResponse() {
    final TableEntity entity = new TableEntity(
        testResourceNamer.randomName("partitionKey", 20), testResourceNamer.randomName("rowKey", 20));

    // Entity creation reports 204 No Content.
    StepVerifier.create(tableClient.createEntityWithResponse(entity))
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void createEntityWithAllSupportedDataTypes() {
// Round-trips one property of every data type the Tables service supports and verifies each
// comes back as the expected Java type.
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final boolean booleanValue = true;
final byte[] binaryValue = "Test value".getBytes();
final Date dateValue = new Date();
final OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();
final double doubleValue = 2.0d;
final UUID guidValue = UUID.randomUUID();
final int int32Value = 1337;
final long int64Value = 1337L;
final String stringValue = "This is table entity";
tableEntity.addProperty("BinaryTypeProperty", binaryValue);
tableEntity.addProperty("BooleanTypeProperty", booleanValue);
tableEntity.addProperty("DateTypeProperty", dateValue);
tableEntity.addProperty("OffsetDateTimeTypeProperty", offsetDateTimeValue);
tableEntity.addProperty("DoubleTypeProperty", doubleValue);
tableEntity.addProperty("GuidTypeProperty", guidValue);
tableEntity.addProperty("Int32TypeProperty", int32Value);
tableEntity.addProperty("Int64TypeProperty", int64Value);
tableEntity.addProperty("StringTypeProperty", stringValue);
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
final TableEntity entity = response.getValue();
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.get("BinaryTypeProperty") instanceof byte[]);
assertTrue(properties.get("BooleanTypeProperty") instanceof Boolean);
// Note: a java.util.Date property is deserialized back as OffsetDateTime.
assertTrue(properties.get("DateTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("OffsetDateTimeTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("DoubleTypeProperty") instanceof Double);
assertTrue(properties.get("GuidTypeProperty") instanceof UUID);
assertTrue(properties.get("Int32TypeProperty") instanceof Integer);
assertTrue(properties.get("Int64TypeProperty") instanceof Long);
assertTrue(properties.get("StringTypeProperty") instanceof String);
})
.expectComplete()
.verify();
}
/*@Test
public void createEntitySubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
SampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);
tableEntity.setByteField(bytes);
tableEntity.setBooleanField(b);
tableEntity.setDateTimeField(dateTime);
tableEntity.setDoubleField(d);
tableEntity.setUuidField(uuid);
tableEntity.setIntField(i);
tableEntity.setLongField(l);
tableEntity.setStringField(s);
tableEntity.setEnumField(color);
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
TableEntity entity = response.getValue();
assertArrayEquals((byte[]) entity.getProperties().get("ByteField"), bytes);
assertEquals(entity.getProperties().get("BooleanField"), b);
assertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get("DateTimeField")));
assertEquals(entity.getProperties().get("DoubleField"), d);
assertEquals(0, uuid.compareTo((UUID) entity.getProperties().get("UuidField")));
assertEquals(entity.getProperties().get("IntField"), i);
assertEquals(entity.getProperties().get("LongField"), l);
assertEquals(entity.getProperties().get("StringField"), s);
assertEquals(entity.getProperties().get("EnumField"), color.name());
})
.expectComplete()
.verify();
}*/
@Test
public void deleteTable() {
// Deleting the table created in beforeTest() should complete without emitting a value.
StepVerifier.create(tableClient.deleteTable())
.expectComplete()
.verify();
}
@Test
public void deleteNonExistingTable() {
    // Remove the table first so the second delete targets a table that no longer exists.
    // Fix: bound the blocking call with TIMEOUT, consistent with every other block() in this class.
    tableClient.deleteTable().block(TIMEOUT);

    // Deleting a missing table is swallowed by the client and completes successfully.
    StepVerifier.create(tableClient.deleteTable())
        .expectComplete()
        .verify();
}
@Test
public void deleteTableWithResponse() {
    // Deleting an existing table reports 204 No Content.
    StepVerifier.create(tableClient.deleteTableWithResponse())
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void deleteNonExistingTableWithResponse() {
    // Remove the table first so the second delete targets a table that no longer exists.
    // Fix: bound the blocking call with TIMEOUT, consistent with every other block() in this class.
    tableClient.deleteTableWithResponse().block(TIMEOUT);

    // With the *WithResponse variant the service's 404 surfaces in the response status code.
    StepVerifier.create(tableClient.deleteTableWithResponse())
        .assertNext(response -> assertEquals(404, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void deleteEntity() {
// Baseline case: plain alphanumeric partition and row keys.
deleteEntityImpl("partitionKey", "rowKey");
}
@Test
public void deleteEntityWithSingleQuotesInPartitionKey() {
// Single quotes must be escaped correctly in the partition key portion of the request URL.
deleteEntityImpl("partition'Key", "rowKey");
}
@Test
public void deleteEntityWithSingleQuotesInRowKey() {
// Single quotes must be escaped correctly in the row key portion of the request URL.
deleteEntityImpl("partitionKey", "row'Key");
}
private void deleteEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);

    // Create the entity and confirm the service assigned it an eTag before deleting it.
    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);

    final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);

    assertNotNull(createdEntity, "'createdEntity' should not be null.");
    assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");

    StepVerifier.create(tableClient.deleteEntity(partitionKeyValue, rowKeyValue))
        .expectComplete()
        .verify();
}
@Test
public void deleteNonExistingEntity() {
    // Deleting an entity that was never created is swallowed and completes successfully.
    StepVerifier.create(tableClient.deleteEntity(
            testResourceNamer.randomName("partitionKey", 20), testResourceNamer.randomName("rowKey", 20)))
        .expectComplete()
        .verify();
}
@Test
public void deleteEntityWithResponse() {
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);

    tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);

    // Fetch the created entity so the delete call has a populated eTag available.
    final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);

    assertNotNull(createdEntity, "'createdEntity' should not be null.");
    assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");

    // Unconditional delete (ifUnchanged = false) reports 204 No Content.
    StepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, false))
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void deleteNonExistingEntityWithResponse() {
    final TableEntity entity = new TableEntity(
        testResourceNamer.randomName("partitionKey", 20), testResourceNamer.randomName("rowKey", 20));

    // The *WithResponse variant surfaces the service's 404 for a missing entity.
    StepVerifier.create(tableClient.deleteEntityWithResponse(entity, false))
        .assertNext(response -> assertEquals(404, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void deleteEntityWithResponseMatchETag() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
tableClient.createEntity(tableEntity).block(TIMEOUT);
// Fetch the created entity so its service-assigned eTag can be used for a conditional delete.
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
// ifUnchanged = true: delete only succeeds because the eTag still matches the stored entity.
StepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, true))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void getEntityWithSingleQuotesInPartitionKey() {
// Single quotes must be escaped correctly in the partition key portion of the request URL.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partition'Key", "rowKey");
}
@Test
public void getEntityWithSingleQuotesInRowKey() {
// Single quotes must be escaped correctly in the row key portion of the request URL.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partitionKey", "row'Key");
}
@Test
public void getEntityWithResponse() {
// Baseline case: plain alphanumeric partition and row keys.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partitionKey", "rowKey");
}
// Shared helper (also used by sibling test classes): creates an entity with randomized keys built
// from the given prefixes, retrieves it, and verifies the 200 response plus populated metadata.
static void getEntityWithResponseAsyncImpl(TableAsyncClient tableClient, TestResourceNamer testResourceNamer,
String partitionKeyPrefix, String rowKeyPrefix) {
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 200;
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
final TableEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
// Timestamp and eTag are service-assigned, so they must be present after a round-trip.
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertNotNull(entity.getProperties());
})
.expectComplete()
.verify();
}
@Test
public void getEntityWithResponseWithSelect() {
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
    tableEntity.addProperty("Test", "Value");
    final int expectedStatusCode = 200;

    tableClient.createEntity(tableEntity).block(TIMEOUT);

    // Project only the "Test" property; key/timestamp fields should then be absent from the result.
    List<String> propertyList = new ArrayList<>();
    propertyList.add("Test");

    StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList))
        .assertNext(response -> {
            final TableEntity entity = response.getValue();
            assertEquals(expectedStatusCode, response.getStatusCode());
            assertNotNull(entity);
            assertNull(entity.getPartitionKey());
            assertNull(entity.getRowKey());
            assertNull(entity.getTimestamp());
            assertNotNull(entity.getETag());
            // Fix: JUnit's assertEquals takes (expected, actual); the arguments were reversed.
            assertEquals("Value", entity.getProperties().get("Test"));
        })
        .expectComplete()
        .verify();
}
@Test
public void updateEntityWithSingleQuotesInPartitionKey() {
// Single quotes must be escaped correctly when updating via the partition key in the URL.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partition'Key", "rowKey");
}
@Test
public void updateEntityWithSingleQuotesInRowKey() {
// Single quotes must be escaped correctly when updating via the row key in the URL.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partitionKey", "row'Key");
}
/*@Test
public void getEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
final Map<String, Object> props = new HashMap<>();
props.put("ByteField", bytes);
props.put("BooleanField", b);
props.put("DateTimeField", dateTime);
props.put("DoubleField", d);
props.put("UuidField", uuid);
props.put("IntField", i);
props.put("LongField", l);
props.put("StringField", s);
props.put("EnumField", color);
TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
tableEntity.setProperties(props);
int expectedStatusCode = 200;
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class))
.assertNext(response -> {
SampleEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertArrayEquals(bytes, entity.getByteField());
assertEquals(b, entity.getBooleanField());
assertTrue(dateTime.isEqual(entity.getDateTimeField()));
assertEquals(d, entity.getDoubleField());
assertEquals(0, uuid.compareTo(entity.getUuidField()));
assertEquals(i, entity.getIntField());
assertEquals(l, entity.getLongField());
assertEquals(s, entity.getStringField());
assertEquals(color, entity.getEnumField());
})
.expectComplete()
.verify();
}*/
@Test
public void updateEntityWithResponseReplace() {
// REPLACE mode: the updated entity should contain only the new property.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partitionKey", "rowKey");
}
@Test
public void updateEntityWithResponseMerge() {
// MERGE mode: the updated entity should contain both the old and the new property.
updateEntityWithResponseAsync(TableEntityUpdateMode.MERGE, "partitionKey", "rowKey");
}
/**
* Verifies update semantics for the given mode. In the case of {@link TableEntityUpdateMode#MERGE},
* the old property is expected to survive alongside the new one; in the case of
* {@link TableEntityUpdateMode#REPLACE}, only the new property should remain on the entity.
*/
void updateEntityWithResponseAsync(TableEntityUpdateMode mode, String partitionKeyPrefix, String rowKeyPrefix) {
final boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final int expectedStatusCode = 204;
final String oldPropertyKey = "propertyA";
final String newPropertyKey = "propertyB";
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)
.addProperty(oldPropertyKey, "valueA");
tableClient.createEntity(tableEntity).block(TIMEOUT);
// Fetch the created entity so the conditional update (ifUnchanged = true) has its eTag.
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
// Swap the old property for a new one before issuing the update.
createdEntity.getProperties().remove(oldPropertyKey);
createdEntity.addProperty(newPropertyKey, "valueB");
StepVerifier.create(tableClient.updateEntityWithResponse(createdEntity, mode, true))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
// Re-read the entity: the new property must exist; the old one only survives a MERGE.
StepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))
.assertNext(entity -> {
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey(newPropertyKey));
assertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));
})
.verifyComplete();
}
/*@Test
public void updateEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("APartitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("ARowKey", 20);
int expectedStatusCode = 204;
SingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);
tableEntity.setSubclassProperty("InitialValue");
tableClient.createEntity(tableEntity).block(TIMEOUT);
tableEntity.setSubclassProperty("UpdatedValue");
StepVerifier.create(tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
StepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))
.assertNext(entity -> {
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey("SubclassProperty"));
assertEquals("UpdatedValue", properties.get("SubclassProperty"));
})
.verifyComplete();
}*/
@Test
public void listEntities() {
// Baseline case: plain alphanumeric partition and row keys.
listEntitiesImpl("partitionKey", "rowKey");
}
@Test
public void listEntitiesWithSingleQuotesInPartitionKey() {
// Single quotes in the partition key must not break listing.
listEntitiesImpl("partition'Key", "rowKey");
}
@Test
public void listEntitiesWithSingleQuotesInRowKey() {
// Single quotes in the row key must not break listing.
listEntitiesImpl("partitionKey", "row'Key");
}
/**
 * Shared implementation: creates two entities under a single partition and
 * verifies that listEntities() emits at least both of them before completing.
 */
private void listEntitiesImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String pk = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rk1 = testResourceNamer.randomName(rowKeyPrefix, 20);
    final String rk2 = testResourceNamer.randomName(rowKeyPrefix, 20);

    tableClient.createEntity(new TableEntity(pk, rk1)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(pk, rk2)).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities())
        .expectNextCount(2)
        .thenConsumeWhile(ignored -> true)
        .expectComplete()
        .verify();
}
/**
 * Creates two entities in the same partition and verifies that a RowKey
 * filter returns exactly the single matching entity.
 */
@Test
public void listEntitiesWithFilter() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);
    final String matchingRowKey = testResourceNamer.randomName("rowKey", 20);
    final String otherRowKey = testResourceNamer.randomName("rowKey", 20);

    final ListEntitiesOptions options =
        new ListEntitiesOptions().setFilter("RowKey eq '" + matchingRowKey + "'");

    tableClient.createEntity(new TableEntity(pk, matchingRowKey)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(pk, otherRowKey)).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities(options))
        .assertNext(entity -> {
            assertEquals(pk, entity.getPartitionKey());
            assertEquals(matchingRowKey, entity.getRowKey());
        })
        .expectNextCount(0)
        .thenConsumeWhile(ignored -> true)
        .expectComplete()
        .verify();
}
/**
 * Verifies that a $select projection returns only the requested property:
 * unselected properties — and the entity keys, which were not selected —
 * come back null.
 */
@Test
public void listEntitiesWithSelect() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);
    final String rk = testResourceNamer.randomName("rowKey", 20);

    final List<String> selected = new ArrayList<>();
    selected.add("propertyC");
    final ListEntitiesOptions options = new ListEntitiesOptions().setSelect(selected);

    final TableEntity entity = new TableEntity(pk, rk)
        .addProperty("propertyC", "valueC")
        .addProperty("propertyD", "valueD");
    tableClient.createEntity(entity).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities(options))
        .assertNext(returned -> {
            // Keys were not part of the projection, so they are absent.
            assertNull(returned.getRowKey());
            assertNull(returned.getPartitionKey());
            assertEquals("valueC", returned.getProperties().get("propertyC"));
            assertNull(returned.getProperties().get("propertyD"));
        })
        .expectComplete()
        .verify();
}
/**
 * Creates three entities and verifies that setTop(2) caps the first page of
 * results at two entities.
 */
@Test
public void listEntitiesWithTop() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);

    // Three distinct row keys, generated in the same order the recording expects.
    for (int i = 0; i < 3; i++) {
        final String rk = testResourceNamer.randomName("rowKey", 20);
        tableClient.createEntity(new TableEntity(pk, rk)).block(TIMEOUT);
    }

    StepVerifier.create(tableClient.listEntities(new ListEntitiesOptions().setTop(2)))
        .expectNextCount(2)
        .thenConsumeWhile(ignored -> true)
        .expectComplete()
        .verify();
}
/*@Test
public void listEntitiesSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);
StepVerifier.create(tableClient.listEntities(SampleEntity.class))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}*/
// Submits a two-action CREATE transaction and verifies: the batch responds 202,
// each sub-operation responds 204, and one of the created entities is readable.
@Test
public void submitTransaction() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));
// Block for the batch result so the per-action status codes can be asserted inline.
final Response<TableTransactionResult> result =
tableClient.submitTransactionWithResponse(transactionalBatch).block(TIMEOUT);
assertNotNull(result);
assertEquals(expectedBatchStatusCode, result.getStatusCode());
assertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(0).getStatusCode());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(1).getStatusCode());
// Round-trip: the first created entity must now be retrievable with service metadata set.
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
final TableEntity entity = response.getValue();
assertNotNull(entity);
assertEquals(partitionKeyValue, entity.getPartitionKey());
assertEquals(rowKeyValue, entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertNotNull(entity.getProperties());
})
.expectComplete()
.verify();
}
// Exercises every transaction action type with plain keys (delegates to the shared impl).
@Test
public void submitTransactionAllActions() {
submitTransactionAllActionsImpl("partitionKey", "rowKey");
}
// Same, with a single quote in the partition key to exercise OData escaping.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey() {
submitTransactionAllActionsImpl("partition'Key", "rowKey");
}
// Same, with a single quote in the row key.
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey() {
submitTransactionAllActionsImpl("partitionKey", "row'Key");
}
// Shared implementation: submits one transaction containing every supported action
// type (CREATE, UPSERT_MERGE as insert and as merge, UPSERT_REPLACE, UPDATE_MERGE,
// UPDATE_REPLACE, DELETE) and verifies the batch returns 202 with 204 per action.
// NOTE: the order of randomName() calls below matters — recorded test names are
// replayed in sequence during playback.
private void submitTransactionAllActionsImpl(String partitionKeyPrefix, String rowKeyPrefix) {
String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
String rowKeyValueCreate = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertInsert = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueDelete = testResourceNamer.randomName(rowKeyPrefix, 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
// Pre-create the entities that the merge/replace/update/delete actions will target.
// rowKeyValueCreate and rowKeyValueUpsertInsert are intentionally NOT pre-created.
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete)).block(TIMEOUT);
TableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);
toUpsertMerge.addProperty("Test", "MergedValue");
TableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);
toUpsertReplace.addProperty("Test", "ReplacedValue");
TableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);
toUpdateMerge.addProperty("Test", "MergedValue");
TableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);
toUpdateReplace.addProperty("Test", "MergedValue");
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
new TableEntity(partitionKeyValue, rowKeyValueCreate)));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,
new TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
new TableEntity(partitionKeyValue, rowKeyValueDelete)));
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.assertNext(response -> {
assertNotNull(response);
assertEquals(expectedBatchStatusCode, response.getStatusCode());
TableTransactionResult result = response.getValue();
assertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());
// Every individual action in the batch must have succeeded with 204.
for (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {
assertEquals(expectedOperationStatusCode, subResponse.getStatusCode());
}
})
.expectComplete()
.verify();
}
/**
 * A transaction containing a DELETE for a non-existent entity must fail as a
 * whole, and the TableTransactionFailedException message must identify the
 * failing action and its keys.
 */
@Test
public void submitTransactionWithFailingAction() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);
    final String rkCreate = testResourceNamer.randomName("rowKey", 20);
    final String rkDelete = testResourceNamer.randomName("rowKey", 20);

    final List<TableTransactionAction> actions = new ArrayList<>();
    actions.add(new TableTransactionAction(TableTransactionActionType.CREATE,
        new TableEntity(pk, rkCreate)));
    // rkDelete was never created, so this action fails the whole batch.
    actions.add(new TableTransactionAction(TableTransactionActionType.DELETE,
        new TableEntity(pk, rkDelete)));

    StepVerifier.create(tableClient.submitTransactionWithResponse(actions))
        .expectErrorMatches(e -> e instanceof TableTransactionFailedException
            && e.getMessage().contains("An action within the operation failed")
            && e.getMessage().contains("The failed operation was")
            && e.getMessage().contains("DeleteEntity")
            && e.getMessage().contains("partitionKey='" + pk)
            && e.getMessage().contains("rowKey='" + rkDelete))
        .verify();
}
// A batch may reference each entity at most once. Cosmos rejects the duplicate
// with a plain 400 TableServiceException; Storage surfaces it as a
// TableTransactionFailedException identifying the offending CREATE action.
@Test
public void submitTransactionWithSameRowKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
// Two CREATE actions for the *same* partition key + row key — invalid in a batch.
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
if (IS_COSMOS_TEST) {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableServiceException
&& e.getMessage().contains("Status code 400")
&& e.getMessage().contains("InvalidDuplicateRow")
&& e.getMessage().contains("The batch request contains multiple changes with same row key.")
&& e.getMessage().contains("An entity can appear only once in a batch request."))
.verify();
} else {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue)
&& e.getMessage().contains("rowKey='" + rowKeyValue))
.verify();
}
}
// All actions in a transaction must share one partition key. Per the recorded
// expectations below, Cosmos reports the first action as the failed one while
// Storage reports the second — presumably a service-side difference in which
// action is blamed; confirm against current service behavior if this changes.
@Test
public void submitTransactionWithDifferentPartitionKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String partitionKeyValue2 = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
// Second action deliberately uses a different partition key — invalid batch.
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));
if (IS_COSMOS_TEST) {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue)
&& e.getMessage().contains("rowKey='" + rowKeyValue))
.verify();
} else {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue2)
&& e.getMessage().contains("rowKey='" + rowKeyValue2))
.verify();
}
}
/**
 * Generates a SAS with only the required expiry/permissions plus protocol and
 * version, and checks the rendered query-string prefix field by field.
 */
@Test
public void generateSasTokenWithMinimumParameters() {
    final TableSasSignatureValues sasValues = new TableSasSignatureValues(
            OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC),
            TableSasPermission.parse("r"))
        .setProtocol(TableSasProtocol.HTTPS_ONLY)
        .setVersion(TableServiceVersion.V2019_02_02.getVersion());

    final String expectedPrefix = "sv=2019-02-02"
        + "&se=2021-12-12T00%3A00%3A00Z"
        + "&tn=" + tableClient.getTableName()
        + "&sp=r"
        + "&spr=https"
        + "&sig=";

    // The signature itself varies per key, so only the prefix is asserted.
    assertTrue(tableClient.generateSas(sasValues).startsWith(expectedPrefix));
}
/**
 * Generates a SAS with every optional parameter populated (start time, IP
 * range, partition/row key ranges) and checks the rendered query-string prefix.
 */
@Test
public void generateSasTokenWithAllParameters() {
    final TableSasSignatureValues sasValues = new TableSasSignatureValues(
            OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC),
            TableSasPermission.parse("raud"))
        .setProtocol(TableSasProtocol.HTTPS_HTTP)
        .setVersion(TableServiceVersion.V2019_02_02.getVersion())
        .setStartTime(OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))
        .setSasIpRange(TableSasIpRange.parse("a-b"))
        .setStartPartitionKey("startPartitionKey")
        .setStartRowKey("startRowKey")
        .setEndPartitionKey("endPartitionKey")
        .setEndRowKey("endRowKey");

    final String expectedPrefix = "sv=2019-02-02"
        + "&st=2015-01-01T00%3A00%3A00Z"
        + "&se=2021-12-12T00%3A00%3A00Z"
        + "&tn=" + tableClient.getTableName()
        + "&sp=raud"
        + "&spk=startPartitionKey"
        + "&srk=startRowKey"
        + "&epk=endPartitionKey"
        + "&erk=endRowKey"
        + "&sip=a-b"
        + "&spr=https%2Chttp"
        + "&sig=";

    // The signature itself varies per key, so only the prefix is asserted.
    assertTrue(tableClient.generateSas(sasValues).startsWith(expectedPrefix));
}
// End-to-end SAS check: a client authenticated only with a generated 'add'
// SAS must be able to create an entity. Disabled; Storage-only (Cosmos skipped).
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Skipping Cosmos test.");
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
// 'a' = add permission only.
final TableSasPermission permissions = TableSasPermission.parse("a");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
// Build a second client that authenticates with the SAS token only.
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(tableClient.getTableEndpoint())
.sasToken(sas)
.tableName(tableClient.getTableName());
// Wire up playback/record/live HTTP plumbing the same way the base builder does.
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
// Sets a single stored access policy on the table and verifies that reading the
// policies back returns the same id, window, and permissions. Storage only —
// Cosmos endpoints do not support access policies.
@Test
public void setAndListAccessPolicies() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and listing access policies is not supported on Cosmos endpoints.");
OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
String permissions = "r";
TableAccessPolicy tableAccessPolicy = new TableAccessPolicy()
.setStartsOn(startTime)
.setExpiresOn(expiryTime)
.setPermissions(permissions);
String id = "testPolicy";
TableSignedIdentifier tableSignedIdentifier = new TableSignedIdentifier(id).setAccessPolicy(tableAccessPolicy);
StepVerifier.create(tableClient.setAccessPoliciesWithResponse(Collections.singletonList(tableSignedIdentifier)))
.assertNext(response -> assertEquals(204, response.getStatusCode()))
.expectComplete()
.verify();
// Read the policies back and verify the round-trip preserved every field.
StepVerifier.create(tableClient.getAccessPolicies())
.assertNext(tableAccessPolicies -> {
assertNotNull(tableAccessPolicies);
assertNotNull(tableAccessPolicies.getIdentifiers());
TableSignedIdentifier signedIdentifier = tableAccessPolicies.getIdentifiers().get(0);
assertNotNull(signedIdentifier);
TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();
assertNotNull(accessPolicy);
assertEquals(startTime, accessPolicy.getStartsOn());
assertEquals(expiryTime, accessPolicy.getExpiresOn());
assertEquals(permissions, accessPolicy.getPermissions());
assertEquals(id, signedIdentifier.getId());
})
.expectComplete()
.verify();
}
// Sets two stored access policies sharing one TableAccessPolicy instance and
// verifies both identifiers round-trip, in order. Storage only.
@Test
public void setAndListMultipleAccessPolicies() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and listing access policies is not supported on Cosmos endpoints");
OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
String permissions = "r";
TableAccessPolicy tableAccessPolicy = new TableAccessPolicy()
.setStartsOn(startTime)
.setExpiresOn(expiryTime)
.setPermissions(permissions);
String id1 = "testPolicy1";
String id2 = "testPolicy2";
List<TableSignedIdentifier> tableSignedIdentifiers = new ArrayList<>();
tableSignedIdentifiers.add(new TableSignedIdentifier(id1).setAccessPolicy(tableAccessPolicy));
tableSignedIdentifiers.add(new TableSignedIdentifier(id2).setAccessPolicy(tableAccessPolicy));
StepVerifier.create(tableClient.setAccessPoliciesWithResponse(tableSignedIdentifiers))
.assertNext(response -> assertEquals(204, response.getStatusCode()))
.expectComplete()
.verify();
// Both identifiers must come back, in the order they were set, with identical policies.
StepVerifier.create(tableClient.getAccessPolicies())
.assertNext(tableAccessPolicies -> {
assertNotNull(tableAccessPolicies);
assertNotNull(tableAccessPolicies.getIdentifiers());
assertEquals(2, tableAccessPolicies.getIdentifiers().size());
assertEquals(id1, tableAccessPolicies.getIdentifiers().get(0).getId());
assertEquals(id2, tableAccessPolicies.getIdentifiers().get(1).getId());
for (TableSignedIdentifier signedIdentifier : tableAccessPolicies.getIdentifiers()) {
assertNotNull(signedIdentifier);
TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();
assertNotNull(accessPolicy);
assertEquals(startTime, accessPolicy.getStartsOn());
assertEquals(expiryTime, accessPolicy.getExpiresOn());
assertEquals(permissions, accessPolicy.getPermissions());
}
})
.expectComplete()
.verify();
}
} | class TableAsyncClientTest extends TableClientTestBase {
// Upper bound applied to blocking calls and (via beforeAll) every StepVerifier in this class.
private static final Duration TIMEOUT = Duration.ofSeconds(100);
// Client under test; rebuilt against a fresh, randomly named table in beforeTest().
private TableAsyncClient tableClient;
// Apply the suite-wide timeout as StepVerifier's default for all tests in this class.
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(TIMEOUT);
}
// Restore StepVerifier's default timeout so other suites are unaffected.
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
// Per-test setup: build a client against a fresh, randomly named table and create it
// so each test starts from an existing, empty table.
protected void beforeTest() {
final String tableName = testResourceNamer.randomName("tableName", 20);
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
tableClient = getClientBuilder(tableName, connectionString).buildAsyncClient();
tableClient.createTable().block(TIMEOUT);
}
/**
 * Creates a second, randomly named table through a fresh client and verifies
 * the create call emits a non-null result and completes.
 */
@Test
public void createTable() {
    final String newTableName = testResourceNamer.randomName("tableName", 20);
    final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
    final TableAsyncClient newTableClient =
        getClientBuilder(newTableName, connectionString).buildAsyncClient();

    StepVerifier.create(newTableClient.createTable())
        .assertNext(Assertions::assertNotNull)
        .expectComplete()
        .verify();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void createTableWithMultipleTenants() {
// Only meaningful against real Storage endpoints on the 2020-12-06 service version.
Assumptions.assumeTrue(tableClient.getTableEndpoint().contains("core.windows.net")
&& tableClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
final String tableName2 = testResourceNamer.randomName("tableName", 20);
// Random tenant id + allow-all tenants forces the credential through the
// authentication challenge's tenant discovery.
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableAsyncClient tableClient2 =
getClientBuilder(tableName2, Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
// NOTE(review): the default-endpoint string literal below appears truncated
// ("https:) — likely lost in extraction; confirm against version control.
"https:
StepVerifier.create(tableClient2.createTable())
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
StepVerifier.create(tableClient2.createEntity(tableEntity))
.expectComplete()
.verify();
}
/**
 * Creating a brand-new table via the WithResponse overload returns HTTP 204.
 */
@Test
public void createTableWithResponse() {
    final String newTableName = testResourceNamer.randomName("tableName", 20);
    final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
    final TableAsyncClient newTableClient =
        getClientBuilder(newTableName, connectionString).buildAsyncClient();

    StepVerifier.create(newTableClient.createTableWithResponse())
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
// Verifies entity creation with plain keys (delegates to the shared impl).
@Test
public void createEntity() {
createEntityImpl("partitionKey", "rowKey");
}
// Same, with a single quote in the partition key to exercise OData escaping.
@Test
public void createEntityWithSingleQuotesInPartitionKey() {
createEntityImpl("partition'Key", "rowKey");
}
// Same, with a single quote in the row key.
@Test
public void createEntityWithSingleQuotesInRowKey() {
createEntityImpl("partitionKey", "row'Key");
}
/**
 * Shared implementation: creating a brand-new entity completes without error.
 */
private void createEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final TableEntity entity = new TableEntity(
        testResourceNamer.randomName(partitionKeyPrefix, 20),
        testResourceNamer.randomName(rowKeyPrefix, 20));

    StepVerifier.create(tableClient.createEntity(entity))
        .expectComplete()
        .verify();
}
/**
 * Creating an entity via the WithResponse overload returns HTTP 204.
 */
@Test
public void createEntityWithResponse() {
    final TableEntity entity = new TableEntity(
        testResourceNamer.randomName("partitionKey", 20),
        testResourceNamer.randomName("rowKey", 20));

    StepVerifier.create(tableClient.createEntityWithResponse(entity))
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
// Writes one property of every supported EDM data type and verifies each
// round-trips as the expected Java type. Note java.util.Date is returned as
// OffsetDateTime — the service stores both as Edm.DateTime.
@Test
public void createEntityWithAllSupportedDataTypes() {
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
final boolean booleanValue = true;
final byte[] binaryValue = "Test value".getBytes();
final Date dateValue = new Date();
final OffsetDateTime offsetDateTimeValue = OffsetDateTime.now();
final double doubleValue = 2.0d;
final UUID guidValue = UUID.randomUUID();
final int int32Value = 1337;
final long int64Value = 1337L;
final String stringValue = "This is table entity";
tableEntity.addProperty("BinaryTypeProperty", binaryValue);
tableEntity.addProperty("BooleanTypeProperty", booleanValue);
tableEntity.addProperty("DateTypeProperty", dateValue);
tableEntity.addProperty("OffsetDateTimeTypeProperty", offsetDateTimeValue);
tableEntity.addProperty("DoubleTypeProperty", doubleValue);
tableEntity.addProperty("GuidTypeProperty", guidValue);
tableEntity.addProperty("Int32TypeProperty", int32Value);
tableEntity.addProperty("Int64TypeProperty", int64Value);
tableEntity.addProperty("StringTypeProperty", stringValue);
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
final TableEntity entity = response.getValue();
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.get("BinaryTypeProperty") instanceof byte[]);
assertTrue(properties.get("BooleanTypeProperty") instanceof Boolean);
// A Date written in is deserialized back as OffsetDateTime.
assertTrue(properties.get("DateTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("OffsetDateTimeTypeProperty") instanceof OffsetDateTime);
assertTrue(properties.get("DoubleTypeProperty") instanceof Double);
assertTrue(properties.get("GuidTypeProperty") instanceof UUID);
assertTrue(properties.get("Int32TypeProperty") instanceof Integer);
assertTrue(properties.get("Int64TypeProperty") instanceof Long);
assertTrue(properties.get("StringTypeProperty") instanceof String);
})
.expectComplete()
.verify();
}
/*@Test
public void createEntitySubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
SampleEntity tableEntity = new SampleEntity(partitionKeyValue, rowKeyValue);
tableEntity.setByteField(bytes);
tableEntity.setBooleanField(b);
tableEntity.setDateTimeField(dateTime);
tableEntity.setDoubleField(d);
tableEntity.setUuidField(uuid);
tableEntity.setIntField(i);
tableEntity.setLongField(l);
tableEntity.setStringField(s);
tableEntity.setEnumField(color);
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
TableEntity entity = response.getValue();
assertArrayEquals((byte[]) entity.getProperties().get("ByteField"), bytes);
assertEquals(entity.getProperties().get("BooleanField"), b);
assertTrue(dateTime.isEqual((OffsetDateTime) entity.getProperties().get("DateTimeField")));
assertEquals(entity.getProperties().get("DoubleField"), d);
assertEquals(0, uuid.compareTo((UUID) entity.getProperties().get("UuidField")));
assertEquals(entity.getProperties().get("IntField"), i);
assertEquals(entity.getProperties().get("LongField"), l);
assertEquals(entity.getProperties().get("StringField"), s);
assertEquals(entity.getProperties().get("EnumField"), color.name());
})
.expectComplete()
.verify();
}*/
/**
 * Deleting the table created in setup completes without error.
 */
@Test
public void deleteTable() {
    StepVerifier.create(tableClient.deleteTable()).expectComplete().verify();
}
/**
 * Deleting a table that no longer exists still completes without error —
 * this test expects the delete call to treat the missing table as success.
 */
@Test
public void deleteNonExistingTable() {
    // Remove the table first. Bound the blocking call with TIMEOUT, matching the
    // rest of the suite — the original bare block() could hang indefinitely.
    tableClient.deleteTable().block(TIMEOUT);

    StepVerifier.create(tableClient.deleteTable())
        .expectComplete()
        .verify();
}
/**
 * Deleting the setup table via the WithResponse overload returns HTTP 204.
 */
@Test
public void deleteTableWithResponse() {
    StepVerifier.create(tableClient.deleteTableWithResponse())
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
/**
 * Deleting a non-existent table via the WithResponse overload surfaces the
 * 404 status code instead of erroring.
 */
@Test
public void deleteNonExistingTableWithResponse() {
    final int expectedStatusCode = 404;

    // Remove the table first. Bound the blocking call with TIMEOUT, matching the
    // rest of the suite — the original bare block() could hang indefinitely.
    tableClient.deleteTableWithResponse().block(TIMEOUT);

    StepVerifier.create(tableClient.deleteTableWithResponse())
        .assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
        .expectComplete()
        .verify();
}
// Verifies entity deletion with plain keys (delegates to the shared impl).
@Test
public void deleteEntity() {
deleteEntityImpl("partitionKey", "rowKey");
}
// Same, with a single quote in the partition key to exercise OData escaping.
@Test
public void deleteEntityWithSingleQuotesInPartitionKey() {
deleteEntityImpl("partition'Key", "rowKey");
}
// Same, with a single quote in the row key.
@Test
public void deleteEntityWithSingleQuotesInRowKey() {
deleteEntityImpl("partitionKey", "row'Key");
}
/**
 * Shared implementation: creates an entity, confirms it exists (with an ETag),
 * then verifies unconditional deletion completes without error.
 */
private void deleteEntityImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String pk = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rk = testResourceNamer.randomName(rowKeyPrefix, 20);

    tableClient.createEntity(new TableEntity(pk, rk)).block(TIMEOUT);
    final TableEntity created = tableClient.getEntity(pk, rk).block(TIMEOUT);
    assertNotNull(created, "'createdEntity' should not be null.");
    assertNotNull(created.getETag(), "'eTag' should not be null.");

    StepVerifier.create(tableClient.deleteEntity(pk, rk))
        .expectComplete()
        .verify();
}
/**
 * Deleting an entity that was never created still completes without error.
 */
@Test
public void deleteNonExistingEntity() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);
    final String rk = testResourceNamer.randomName("rowKey", 20);

    StepVerifier.create(tableClient.deleteEntity(pk, rk))
        .expectComplete()
        .verify();
}
/**
 * Creates an entity, fetches it back, then deletes it unconditionally
 * (ifUnchanged = false) via the WithResponse overload; expects HTTP 204.
 */
@Test
public void deleteEntityWithResponse() {
    final String pk = testResourceNamer.randomName("partitionKey", 20);
    final String rk = testResourceNamer.randomName("rowKey", 20);

    tableClient.createEntity(new TableEntity(pk, rk)).block(TIMEOUT);
    final TableEntity created = tableClient.getEntity(pk, rk).block(TIMEOUT);
    assertNotNull(created, "'createdEntity' should not be null.");
    assertNotNull(created.getETag(), "'eTag' should not be null.");

    StepVerifier.create(tableClient.deleteEntityWithResponse(created, false))
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .expectComplete()
        .verify();
}
/**
 * Deleting a non-existent entity via the WithResponse overload surfaces the
 * 404 status code instead of erroring.
 */
@Test
public void deleteNonExistingEntityWithResponse() {
    final TableEntity missing = new TableEntity(
        testResourceNamer.randomName("partitionKey", 20),
        testResourceNamer.randomName("rowKey", 20));

    StepVerifier.create(tableClient.deleteEntityWithResponse(missing, false))
        .assertNext(response -> assertEquals(404, response.getStatusCode()))
        .expectComplete()
        .verify();
}
@Test
public void deleteEntityWithResponseMatchETag() {
    // Same as deleteEntityWithResponse, but with ifUnchanged=true so the delete is conditional on the
    // entity's eTag matching the service-side value.
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);

    tableClient.createEntity(new TableEntity(partitionKey, rowKey)).block(TIMEOUT);

    final TableEntity createdEntity = tableClient.getEntity(partitionKey, rowKey).block(TIMEOUT);
    assertNotNull(createdEntity, "'createdEntity' should not be null.");
    assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");

    StepVerifier.create(tableClient.deleteEntityWithResponse(createdEntity, true))
        .assertNext(response -> assertEquals(204, response.getStatusCode()))
        .verifyComplete();
}
@Test
public void getEntityWithSingleQuotesInPartitionKey() {
// Verifies single quotes in a partition key are escaped correctly on retrieval.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partition'Key", "rowKey");
}
@Test
public void getEntityWithSingleQuotesInRowKey() {
// Verifies single quotes in a row key are escaped correctly on retrieval.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partitionKey", "row'Key");
}
@Test
public void getEntityWithResponse() {
// Plain-key variant of the shared get-entity round-trip check.
getEntityWithResponseAsyncImpl(this.tableClient, this.testResourceNamer, "partitionKey", "rowKey");
}
/**
 * Shared implementation: creates an entity from the given key prefixes, fetches it back with
 * {@code getEntityWithResponse} and verifies a 200 status plus populated system properties.
 */
static void getEntityWithResponseAsyncImpl(TableAsyncClient tableClient, TestResourceNamer testResourceNamer,
    String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String rowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    final TableEntity createdEntity = new TableEntity(partitionKey, rowKey);

    tableClient.createEntity(createdEntity).block(TIMEOUT);

    StepVerifier.create(tableClient.getEntityWithResponse(partitionKey, rowKey, null))
        .assertNext(response -> {
            final TableEntity retrieved = response.getValue();

            assertEquals(200, response.getStatusCode());
            assertNotNull(retrieved);
            assertEquals(createdEntity.getPartitionKey(), retrieved.getPartitionKey());
            assertEquals(createdEntity.getRowKey(), retrieved.getRowKey());
            assertNotNull(retrieved.getTimestamp());
            assertNotNull(retrieved.getETag());
            assertNotNull(retrieved.getProperties());
        })
        .verifyComplete();
}
@Test
public void getEntityWithResponseWithSelect() {
    // Retrieves an entity with a $select projection: only the requested property should come back,
    // while system properties (partition key, row key, timestamp) are omitted by the service.
    final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
    final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
    final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
    tableEntity.addProperty("Test", "Value");
    final int expectedStatusCode = 200;
    tableClient.createEntity(tableEntity).block(TIMEOUT);
    List<String> propertyList = new ArrayList<>();
    propertyList.add("Test");
    StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, propertyList))
        .assertNext(response -> {
            final TableEntity entity = response.getValue();
            assertEquals(expectedStatusCode, response.getStatusCode());
            assertNotNull(entity);
            assertNull(entity.getPartitionKey());
            assertNull(entity.getRowKey());
            assertNull(entity.getTimestamp());
            assertNotNull(entity.getETag());
            // Fixed argument order: JUnit's assertEquals takes (expected, actual). The original call had
            // them swapped, which yields a misleading failure message on mismatch.
            assertEquals("Value", entity.getProperties().get("Test"));
        })
        .expectComplete()
        .verify();
}
@Test
public void updateEntityWithSingleQuotesInPartitionKey() {
// REPLACE-mode update with a single quote in the partition key; exercises key escaping.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partition'Key", "rowKey");
}
@Test
public void updateEntityWithSingleQuotesInRowKey() {
// REPLACE-mode update with a single quote in the row key; exercises key escaping.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partitionKey", "row'Key");
}
/*@Test
public void getEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
byte[] bytes = new byte[]{1, 2, 3};
boolean b = true;
OffsetDateTime dateTime = OffsetDateTime.of(2020, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
double d = 1.23D;
UUID uuid = UUID.fromString("11111111-2222-3333-4444-555555555555");
int i = 123;
long l = 123L;
String s = "Test";
SampleEntity.Color color = SampleEntity.Color.GREEN;
final Map<String, Object> props = new HashMap<>();
props.put("ByteField", bytes);
props.put("BooleanField", b);
props.put("DateTimeField", dateTime);
props.put("DoubleField", d);
props.put("UuidField", uuid);
props.put("IntField", i);
props.put("LongField", l);
props.put("StringField", s);
props.put("EnumField", color);
TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue);
tableEntity.setProperties(props);
int expectedStatusCode = 200;
tableClient.createEntity(tableEntity).block(TIMEOUT);
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null, SampleEntity.class))
.assertNext(response -> {
SampleEntity entity = response.getValue();
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(entity);
assertEquals(tableEntity.getPartitionKey(), entity.getPartitionKey());
assertEquals(tableEntity.getRowKey(), entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertArrayEquals(bytes, entity.getByteField());
assertEquals(b, entity.getBooleanField());
assertTrue(dateTime.isEqual(entity.getDateTimeField()));
assertEquals(d, entity.getDoubleField());
assertEquals(0, uuid.compareTo(entity.getUuidField()));
assertEquals(i, entity.getIntField());
assertEquals(l, entity.getLongField());
assertEquals(s, entity.getStringField());
assertEquals(color, entity.getEnumField());
})
.expectComplete()
.verify();
}*/
@Test
public void updateEntityWithResponseReplace() {
// REPLACE mode: properties absent from the update payload are removed from the stored entity.
updateEntityWithResponseAsync(TableEntityUpdateMode.REPLACE, "partitionKey", "rowKey");
}
@Test
public void updateEntityWithResponseMerge() {
// MERGE mode: properties absent from the update payload are retained on the stored entity.
updateEntityWithResponseAsync(TableEntityUpdateMode.MERGE, "partitionKey", "rowKey");
}
/**
* In the case of {@link TableEntityUpdateMode#MERGE}, the old property is expected to remain on the entity.
* In the case of {@link TableEntityUpdateMode#REPLACE}, the old property is expected to be removed.
*/
// Shared implementation for the update tests: creates an entity carrying "propertyA", then updates it
// with an entity that only carries "propertyB", and asserts the old property's presence according to
// the update mode (MERGE keeps it, REPLACE drops it).
void updateEntityWithResponseAsync(TableEntityUpdateMode mode, String partitionKeyPrefix, String rowKeyPrefix) {
// Only MERGE retains properties missing from the update payload.
final boolean expectOldProperty = mode == TableEntityUpdateMode.MERGE;
final String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
final String rowKeyValue = testResourceNamer.randomName(rowKeyPrefix, 20);
final int expectedStatusCode = 204;
final String oldPropertyKey = "propertyA";
final String newPropertyKey = "propertyB";
final TableEntity tableEntity = new TableEntity(partitionKeyValue, rowKeyValue)
.addProperty(oldPropertyKey, "valueA");
tableClient.createEntity(tableEntity).block(TIMEOUT);
// Re-fetch so the entity carries a service-assigned eTag for the conditional (ifUnchanged=true) update.
final TableEntity createdEntity = tableClient.getEntity(partitionKeyValue, rowKeyValue).block(TIMEOUT);
assertNotNull(createdEntity, "'createdEntity' should not be null.");
assertNotNull(createdEntity.getETag(), "'eTag' should not be null.");
createdEntity.getProperties().remove(oldPropertyKey);
createdEntity.addProperty(newPropertyKey, "valueB");
StepVerifier.create(tableClient.updateEntityWithResponse(createdEntity, mode, true))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
// Verify the stored entity reflects the mode-specific property semantics.
StepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))
.assertNext(entity -> {
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey(newPropertyKey));
assertEquals(expectOldProperty, properties.containsKey(oldPropertyKey));
})
.verifyComplete();
}
/*@Test
public void updateEntityWithResponseSubclass() {
String partitionKeyValue = testResourceNamer.randomName("APartitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("ARowKey", 20);
int expectedStatusCode = 204;
SingleFieldEntity tableEntity = new SingleFieldEntity(partitionKeyValue, rowKeyValue);
tableEntity.setSubclassProperty("InitialValue");
tableClient.createEntity(tableEntity).block(TIMEOUT);
tableEntity.setSubclassProperty("UpdatedValue");
StepVerifier.create(tableClient.updateEntityWithResponse(tableEntity, TableEntityUpdateMode.REPLACE, true))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
StepVerifier.create(tableClient.getEntity(partitionKeyValue, rowKeyValue))
.assertNext(entity -> {
final Map<String, Object> properties = entity.getProperties();
assertTrue(properties.containsKey("SubclassProperty"));
assertEquals("UpdatedValue", properties.get("SubclassProperty"));
})
.verifyComplete();
}*/
@Test
public void listEntities() {
// Plain-key variant of the shared listing check.
listEntitiesImpl("partitionKey", "rowKey");
}
@Test
public void listEntitiesWithSingleQuotesInPartitionKey() {
// Listing should handle partition keys containing single quotes.
listEntitiesImpl("partition'Key", "rowKey");
}
@Test
public void listEntitiesWithSingleQuotesInRowKey() {
// Listing should handle row keys containing single quotes.
listEntitiesImpl("partitionKey", "row'Key");
}
// Shared implementation: inserts two entities under one partition key, then verifies listing emits
// at least those two (the table may contain entities from other tests, hence thenConsumeWhile).
private void listEntitiesImpl(String partitionKeyPrefix, String rowKeyPrefix) {
    final String partitionKey = testResourceNamer.randomName(partitionKeyPrefix, 20);
    final String firstRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);
    final String secondRowKey = testResourceNamer.randomName(rowKeyPrefix, 20);

    tableClient.createEntity(new TableEntity(partitionKey, firstRowKey)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(partitionKey, secondRowKey)).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities())
        .expectNextCount(2)
        .thenConsumeWhile(entity -> true)
        .verifyComplete();
}
@Test
public void listEntitiesWithFilter() {
    // Only the entity whose RowKey matches the OData filter should be returned.
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String matchingRowKey = testResourceNamer.randomName("rowKey", 20);
    final String otherRowKey = testResourceNamer.randomName("rowKey", 20);
    final ListEntitiesOptions options = new ListEntitiesOptions().setFilter("RowKey eq '" + matchingRowKey + "'");

    tableClient.createEntity(new TableEntity(partitionKey, matchingRowKey)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(partitionKey, otherRowKey)).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities(options))
        .assertNext(returned -> {
            assertEquals(partitionKey, returned.getPartitionKey());
            assertEquals(matchingRowKey, returned.getRowKey());
        })
        .expectNextCount(0)
        .thenConsumeWhile(entity -> true)
        .verifyComplete();
}
@Test
public void listEntitiesWithSelect() {
    // With a $select projection only "propertyC" should come back; keys and the unselected
    // "propertyD" must be absent (null).
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String rowKey = testResourceNamer.randomName("rowKey", 20);
    final TableEntity entity = new TableEntity(partitionKey, rowKey)
        .addProperty("propertyC", "valueC")
        .addProperty("propertyD", "valueD");
    final List<String> selectedProperties = new ArrayList<>();
    selectedProperties.add("propertyC");
    final ListEntitiesOptions options = new ListEntitiesOptions()
        .setSelect(selectedProperties);

    tableClient.createEntity(entity).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities(options))
        .assertNext(returned -> {
            assertNull(returned.getRowKey());
            assertNull(returned.getPartitionKey());
            assertEquals("valueC", returned.getProperties().get("propertyC"));
            assertNull(returned.getProperties().get("propertyD"));
        })
        .verifyComplete();
}
@Test
public void listEntitiesWithTop() {
    // setTop(2) caps the page size; the verifier checks the first two items then drains the rest.
    final String partitionKey = testResourceNamer.randomName("partitionKey", 20);
    final String firstRowKey = testResourceNamer.randomName("rowKey", 20);
    final String secondRowKey = testResourceNamer.randomName("rowKey", 20);
    final String thirdRowKey = testResourceNamer.randomName("rowKey", 20);
    final ListEntitiesOptions options = new ListEntitiesOptions().setTop(2);

    tableClient.createEntity(new TableEntity(partitionKey, firstRowKey)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(partitionKey, secondRowKey)).block(TIMEOUT);
    tableClient.createEntity(new TableEntity(partitionKey, thirdRowKey)).block(TIMEOUT);

    StepVerifier.create(tableClient.listEntities(options))
        .expectNextCount(2)
        .thenConsumeWhile(entity -> true)
        .verifyComplete();
}
/*@Test
public void listEntitiesSubclass() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValue2)).block(TIMEOUT);
StepVerifier.create(tableClient.listEntities(SampleEntity.class))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}*/
// Submits a two-action CREATE batch and verifies the batch-level 202 plus a 204 per sub-operation,
// then confirms the first entity is actually retrievable.
@Test
public void submitTransaction() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue2)));
final Response<TableTransactionResult> result =
tableClient.submitTransactionWithResponse(transactionalBatch).block(TIMEOUT);
// The batch response contains one sub-response per submitted action, in order.
assertNotNull(result);
assertEquals(expectedBatchStatusCode, result.getStatusCode());
assertEquals(transactionalBatch.size(), result.getValue().getTransactionActionResponses().size());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(0).getStatusCode());
assertEquals(expectedOperationStatusCode,
result.getValue().getTransactionActionResponses().get(1).getStatusCode());
// Sanity-check one of the created entities round-trips.
StepVerifier.create(tableClient.getEntityWithResponse(partitionKeyValue, rowKeyValue, null))
.assertNext(response -> {
final TableEntity entity = response.getValue();
assertNotNull(entity);
assertEquals(partitionKeyValue, entity.getPartitionKey());
assertEquals(rowKeyValue, entity.getRowKey());
assertNotNull(entity.getTimestamp());
assertNotNull(entity.getETag());
assertNotNull(entity.getProperties());
})
.expectComplete()
.verify();
}
@Test
public void submitTransactionAllActions() {
// Plain-key variant covering every transaction action type.
submitTransactionAllActionsImpl("partitionKey", "rowKey");
}
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInPartitionKey() {
// Single quote in the partition key; exercises escaping across all action types.
submitTransactionAllActionsImpl("partition'Key", "rowKey");
}
@Test
public void submitTransactionAllActionsForEntitiesWithSingleQuotesInRowKey() {
// Single quote in the row key; exercises escaping across all action types.
submitTransactionAllActionsImpl("partitionKey", "row'Key");
}
// Shared implementation: pre-creates the entities needed by the merge/replace/delete actions, then
// submits one batch containing every TableTransactionActionType and verifies 202 for the batch and
// 204 for each sub-operation.
private void submitTransactionAllActionsImpl(String partitionKeyPrefix, String rowKeyPrefix) {
String partitionKeyValue = testResourceNamer.randomName(partitionKeyPrefix, 20);
String rowKeyValueCreate = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertInsert = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpsertReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateMerge = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueUpdateReplace = testResourceNamer.randomName(rowKeyPrefix, 20);
String rowKeyValueDelete = testResourceNamer.randomName(rowKeyPrefix, 20);
int expectedBatchStatusCode = 202;
int expectedOperationStatusCode = 204;
// Entities targeted by merge/replace/update/delete actions must exist before the batch runs.
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace)).block(TIMEOUT);
tableClient.createEntity(new TableEntity(partitionKeyValue, rowKeyValueDelete)).block(TIMEOUT);
TableEntity toUpsertMerge = new TableEntity(partitionKeyValue, rowKeyValueUpsertMerge);
toUpsertMerge.addProperty("Test", "MergedValue");
TableEntity toUpsertReplace = new TableEntity(partitionKeyValue, rowKeyValueUpsertReplace);
toUpsertReplace.addProperty("Test", "ReplacedValue");
TableEntity toUpdateMerge = new TableEntity(partitionKeyValue, rowKeyValueUpdateMerge);
toUpdateMerge.addProperty("Test", "MergedValue");
TableEntity toUpdateReplace = new TableEntity(partitionKeyValue, rowKeyValueUpdateReplace);
toUpdateReplace.addProperty("Test", "MergedValue");
// One action of each type; all share the same partition key as required for a batch.
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
new TableEntity(partitionKeyValue, rowKeyValueCreate)));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE,
new TableEntity(partitionKeyValue, rowKeyValueUpsertInsert)));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_MERGE, toUpsertMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPSERT_REPLACE, toUpsertReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_MERGE, toUpdateMerge));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.UPDATE_REPLACE, toUpdateReplace));
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
new TableEntity(partitionKeyValue, rowKeyValueDelete)));
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.assertNext(response -> {
assertNotNull(response);
assertEquals(expectedBatchStatusCode, response.getStatusCode());
TableTransactionResult result = response.getValue();
assertEquals(transactionalBatch.size(), result.getTransactionActionResponses().size());
for (TableTransactionActionResponse subResponse : result.getTransactionActionResponses()) {
assertEquals(expectedOperationStatusCode, subResponse.getStatusCode());
}
})
.expectComplete()
.verify();
}
// A DELETE of a non-existent entity inside a batch should fail the whole transaction; the thrown
// TableTransactionFailedException identifies the failing operation and its keys.
@Test
public void submitTransactionWithFailingAction() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.CREATE,
new TableEntity(partitionKeyValue, rowKeyValue)));
// rowKeyValue2 was never created, so this delete is the action that fails.
transactionalBatch.add(new TableTransactionAction(TableTransactionActionType.DELETE,
new TableEntity(partitionKeyValue, rowKeyValue2)));
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("DeleteEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue)
&& e.getMessage().contains("rowKey='" + rowKeyValue2))
.verify();
}
// An entity may appear only once per batch. Cosmos rejects the duplicate with a 400
// (InvalidDuplicateRow) TableServiceException, whereas Storage surfaces it as a
// TableTransactionFailedException naming the failing CREATE.
@Test
public void submitTransactionWithSameRowKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
if (IS_COSMOS_TEST) {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableServiceException
&& e.getMessage().contains("Status code 400")
&& e.getMessage().contains("InvalidDuplicateRow")
&& e.getMessage().contains("The batch request contains multiple changes with same row key.")
&& e.getMessage().contains("An entity can appear only once in a batch request."))
.verify();
} else {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue)
&& e.getMessage().contains("rowKey='" + rowKeyValue))
.verify();
}
}
// All actions in a batch must share one partition key. Both backends fail the transaction, but they
// report a different failing action: Cosmos flags the first entity, Storage flags the second.
@Test
public void submitTransactionWithDifferentPartitionKeys() {
String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
String partitionKeyValue2 = testResourceNamer.randomName("partitionKey", 20);
String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
String rowKeyValue2 = testResourceNamer.randomName("rowKey", 20);
List<TableTransactionAction> transactionalBatch = new ArrayList<>();
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue, rowKeyValue)));
transactionalBatch.add(new TableTransactionAction(
TableTransactionActionType.CREATE, new TableEntity(partitionKeyValue2, rowKeyValue2)));
if (IS_COSMOS_TEST) {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue)
&& e.getMessage().contains("rowKey='" + rowKeyValue))
.verify();
} else {
StepVerifier.create(tableClient.submitTransactionWithResponse(transactionalBatch))
.expectErrorMatches(e -> e instanceof TableTransactionFailedException
&& e.getMessage().contains("An action within the operation failed")
&& e.getMessage().contains("The failed operation was")
&& e.getMessage().contains("CreateEntity")
&& e.getMessage().contains("partitionKey='" + partitionKeyValue2)
&& e.getMessage().contains("rowKey='" + rowKeyValue2))
.verify();
}
}
// Generates a table SAS with only the required values (expiry + read permission) and checks the
// query-string prefix; the signature itself varies per account key, so only "&sig=" is asserted.
@Test
public void generateSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("r");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&tn=" + tableClient.getTableName()
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
// Generates a table SAS with every optional value set (start time, IP range, partition/row key
// ranges, HTTPS+HTTP) and verifies each appears in the token's query-string prefix.
@Test
public void generateSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("raud");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final String startPartitionKey = "startPartitionKey";
final String startRowKey = "startRowKey";
final String endPartitionKey = "endPartitionKey";
final String endRowKey = "endRowKey";
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange)
.setStartPartitionKey(startPartitionKey)
.setStartRowKey(startRowKey)
.setEndPartitionKey(endPartitionKey)
.setEndRowKey(endRowKey);
final String sas = tableClient.generateSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&tn=" + tableClient.getTableName()
+ "&sp=raud"
+ "&spk=startPartitionKey"
+ "&srk=startRowKey"
+ "&epk=endPartitionKey"
+ "&erk=endRowKey"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
// Builds a new client authenticated only with a generated SAS ("a" = add permission) and verifies it
// can create an entity. Currently @Disabled; skipped on Cosmos. Playback/record plumbing mirrors the
// test-base client setup.
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Skipping Cosmos test.");
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasPermission permissions = TableSasPermission.parse("a");
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final TableSasSignatureValues sasSignatureValues =
new TableSasSignatureValues(expiryTime, permissions)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = tableClient.generateSas(sasSignatureValues);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(tableClient.getTableEndpoint())
.sasToken(sas)
.tableName(tableClient.getTableName());
// Use the recorded transport in playback; otherwise go live and (optionally) record, with retries.
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
// Sets a single signed-identifier access policy on the table and verifies it round-trips through
// getAccessPolicies. Not supported on Cosmos endpoints, so the test is skipped there.
@Test
public void setAndListAccessPolicies() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and listing access policies is not supported on Cosmos endpoints.");
OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
String permissions = "r";
TableAccessPolicy tableAccessPolicy = new TableAccessPolicy()
.setStartsOn(startTime)
.setExpiresOn(expiryTime)
.setPermissions(permissions);
String id = "testPolicy";
TableSignedIdentifier tableSignedIdentifier = new TableSignedIdentifier(id).setAccessPolicy(tableAccessPolicy);
StepVerifier.create(tableClient.setAccessPoliciesWithResponse(Collections.singletonList(tableSignedIdentifier)))
.assertNext(response -> assertEquals(204, response.getStatusCode()))
.expectComplete()
.verify();
// The policy read back must match what was set, field for field.
StepVerifier.create(tableClient.getAccessPolicies())
.assertNext(tableAccessPolicies -> {
assertNotNull(tableAccessPolicies);
assertNotNull(tableAccessPolicies.getIdentifiers());
TableSignedIdentifier signedIdentifier = tableAccessPolicies.getIdentifiers().get(0);
assertNotNull(signedIdentifier);
TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();
assertNotNull(accessPolicy);
assertEquals(startTime, accessPolicy.getStartsOn());
assertEquals(expiryTime, accessPolicy.getExpiresOn());
assertEquals(permissions, accessPolicy.getPermissions());
assertEquals(id, signedIdentifier.getId());
})
.expectComplete()
.verify();
}
// Sets two signed identifiers sharing one access policy and verifies both round-trip in order.
// Not supported on Cosmos endpoints, so the test is skipped there.
@Test
public void setAndListMultipleAccessPolicies() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and listing access policies is not supported on Cosmos endpoints");
OffsetDateTime startTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
OffsetDateTime expiryTime = OffsetDateTime.of(2022, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
String permissions = "r";
TableAccessPolicy tableAccessPolicy = new TableAccessPolicy()
.setStartsOn(startTime)
.setExpiresOn(expiryTime)
.setPermissions(permissions);
String id1 = "testPolicy1";
String id2 = "testPolicy2";
List<TableSignedIdentifier> tableSignedIdentifiers = new ArrayList<>();
tableSignedIdentifiers.add(new TableSignedIdentifier(id1).setAccessPolicy(tableAccessPolicy));
tableSignedIdentifiers.add(new TableSignedIdentifier(id2).setAccessPolicy(tableAccessPolicy));
StepVerifier.create(tableClient.setAccessPoliciesWithResponse(tableSignedIdentifiers))
.assertNext(response -> assertEquals(204, response.getStatusCode()))
.expectComplete()
.verify();
StepVerifier.create(tableClient.getAccessPolicies())
.assertNext(tableAccessPolicies -> {
assertNotNull(tableAccessPolicies);
assertNotNull(tableAccessPolicies.getIdentifiers());
assertEquals(2, tableAccessPolicies.getIdentifiers().size());
assertEquals(id1, tableAccessPolicies.getIdentifiers().get(0).getId());
assertEquals(id2, tableAccessPolicies.getIdentifiers().get(1).getId());
for (TableSignedIdentifier signedIdentifier : tableAccessPolicies.getIdentifiers()) {
assertNotNull(signedIdentifier);
TableAccessPolicy accessPolicy = signedIdentifier.getAccessPolicy();
assertNotNull(accessPolicy);
assertEquals(startTime, accessPolicy.getStartsOn());
assertEquals(expiryTime, accessPolicy.getExpiresOn());
assertEquals(permissions, accessPolicy.getPermissions());
}
})
.expectComplete()
.verify();
}
} |
Yeah, paging and listing operations have been integrated with sync-stack changes now. I see we haven't ignored any operations here which is the correct state to be. | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | .build(); | protected HttpClient buildAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
} | class TableServiceAsyncClientTest extends TableServiceClientTestBase {
// Upper bound for blocking calls and StepVerifier's default verification timeout.
private static final Duration TIMEOUT = Duration.ofSeconds(100);
// Shared live-mode transport; playback substitutes a recorded client instead.
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
// True when the test run targets a Cosmos endpoint (some features are skipped there).
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
// Client under test; rebuilt per test in beforeTest().
private TableServiceAsyncClient serviceClient;
@BeforeAll
static void beforeAll() {
// Apply the suite-wide verification timeout to every StepVerifier in this class.
StepVerifier.setDefaultTimeout(TIMEOUT);
}
@AfterAll
static void afterAll() {
// Restore StepVerifier's global default timeout so other suites are unaffected.
StepVerifier.resetDefaultTimeout();
}
@Override
protected void beforeTest() {
// Rebuild the service client per test; the connection string differs between playback and live.
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildAsyncClient();
}
@Test
public void serviceCreateTable() {
    // Creating a brand-new table should emit a non-null result and complete.
    final String tableName = testResourceNamer.randomName("test", 20);

    StepVerifier.create(serviceClient.createTable(tableName))
        .assertNext(Assertions::assertNotNull)
        .verifyComplete();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceAsyncClient tableServiceAsyncClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
tableName = testResourceNamer.randomName("tableName", 20);
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableWithResponse() {
    // The *WithResponse overload should report 204 No Content and carry a non-null value.
    final String tableName = testResourceNamer.randomName("test", 20);

    StepVerifier.create(serviceClient.createTableWithResponse(tableName))
        .assertNext(response -> {
            assertEquals(204, response.getStatusCode());
            assertNotNull(response.getValue());
        })
        .verifyComplete();
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTable(tableName))
.expectErrorMatches(e -> e instanceof TableServiceException
&& ((TableServiceException) e).getResponse().getStatusCode() == 409)
.verify();
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName).block();
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables())
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.assertNext(table -> assertEquals(tableName, table.getName()))
.expectNextCount(0)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
serviceClient.createTable(tableName3).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
TableAsyncClient tableClient = serviceClient.getTableClient(tableName);
TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties))
.assertNext(response -> {
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
})
.expectComplete()
.verify();
sleepIfRunningAgainstService(30000);
StepVerifier.create(serviceClient.getProperties())
.assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties))
.expectComplete()
.verify();
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildAsyncClient();
StepVerifier.create(secondaryClient.getStatistics())
.assertNext(statistics -> {
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
})
.expectComplete()
.verify();
}
} | class TableServiceAsyncClientTest extends TableServiceClientTestBase {
private static final Duration TIMEOUT = Duration.ofSeconds(100);
private static final HttpClient DEFAULT_HTTP_CLIENT = HttpClient.createDefault();
private static final boolean IS_COSMOS_TEST = TestUtils.isCosmosTest();
private TableServiceAsyncClient serviceClient;
@BeforeAll
static void beforeAll() {
StepVerifier.setDefaultTimeout(TIMEOUT);
}
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
@Override
protected void beforeTest() {
final String connectionString = TestUtils.getConnectionString(interceptorManager.isPlaybackMode());
serviceClient = getClientBuilder(connectionString).buildAsyncClient();
}
@Test
public void serviceCreateTable() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
/**
* Tests that a table and entity can be created while having a different tenant ID than the one that will be
* provided in the authentication challenge.
*/
@Test
public void serviceCreateTableWithMultipleTenants() {
Assumptions.assumeTrue(serviceClient.getServiceEndpoint().contains("core.windows.net")
&& serviceClient.getServiceVersion() == TableServiceVersion.V2020_12_06);
String tableName = testResourceNamer.randomName("tableName", 20);
final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
.clientId(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_ID", "clientId"))
.clientSecret(Configuration.getGlobalConfiguration().get("TABLES_CLIENT_SECRET", "clientSecret"))
.tenantId(testResourceNamer.randomUuid())
.additionallyAllowedTenants("*")
.build();
final TableServiceAsyncClient tableServiceAsyncClient =
getClientBuilder(Configuration.getGlobalConfiguration().get("TABLES_ENDPOINT",
"https:
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
tableName = testResourceNamer.randomName("tableName", 20);
StepVerifier.create(tableServiceAsyncClient.createTable(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableFailsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTable(tableName))
.expectErrorMatches(e -> e instanceof TableServiceException
&& ((TableServiceException) e).getResponse().getStatusCode() == 409)
.verify();
}
@Test
public void serviceCreateTableIfNotExists() {
String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.assertNext(Assertions::assertNotNull)
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExists(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNotNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceCreateTableIfNotExistsWithResponseSucceedsIfExists() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 409;
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.createTableIfNotExistsWithResponse(tableName))
.assertNext(response -> {
assertEquals(expectedStatusCode, response.getStatusCode());
assertNull(response.getValue());
})
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTable() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTable() {
final String tableName = testResourceNamer.randomName("test", 20);
StepVerifier.create(serviceClient.deleteTable(tableName))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 204;
serviceClient.createTable(tableName).block();
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceDeleteNonExistingTableWithResponse() {
String tableName = testResourceNamer.randomName("test", 20);
int expectedStatusCode = 404;
StepVerifier.create(serviceClient.deleteTableWithResponse(tableName))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void serviceListTables() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables())
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithFilter() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setFilter("TableName eq '" + tableName + "'");
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.assertNext(table -> assertEquals(tableName, table.getName()))
.expectNextCount(0)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceListTablesWithTop() {
final String tableName = testResourceNamer.randomName("test", 20);
final String tableName2 = testResourceNamer.randomName("test", 20);
final String tableName3 = testResourceNamer.randomName("test", 20);
ListTablesOptions options = new ListTablesOptions().setTop(2);
serviceClient.createTable(tableName).block(TIMEOUT);
serviceClient.createTable(tableName2).block(TIMEOUT);
serviceClient.createTable(tableName3).block(TIMEOUT);
StepVerifier.create(serviceClient.listTables(options))
.expectNextCount(2)
.thenConsumeWhile(x -> true)
.expectComplete()
.verify();
}
@Test
public void serviceGetTableClient() {
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
TableAsyncClient tableClient = serviceClient.getTableClient(tableName);
TableAsyncClientTest.getEntityWithResponseAsyncImpl(tableClient, testResourceNamer, "partitionKey", "rowKey");
}
@Test
public void generateAccountSasTokenWithMinimumParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("r");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=r"
+ "&spr=https"
+ "&sig="
)
);
}
@Test
public void generateAccountSasTokenWithAllParameters() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("rdau");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_HTTP;
final OffsetDateTime startTime = OffsetDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final TableSasIpRange ipRange = TableSasIpRange.parse("a-b");
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion())
.setStartTime(startTime)
.setSasIpRange(ipRange);
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
assertTrue(
sas.startsWith(
"sv=2019-02-02"
+ "&ss=t"
+ "&srt=o"
+ "&st=2015-01-01T00%3A00%3A00Z"
+ "&se=2021-12-12T00%3A00%3A00Z"
+ "&sp=rdau"
+ "&sip=a-b"
+ "&spr=https%2Chttp"
+ "&sig="
)
);
}
@Test
@Disabled
public void canUseSasTokenToCreateValidTableClient() {
final OffsetDateTime expiryTime = OffsetDateTime.of(2021, 12, 12, 0, 0, 0, 0, ZoneOffset.UTC);
final TableAccountSasPermission permissions = TableAccountSasPermission.parse("a");
final TableAccountSasService services = new TableAccountSasService().setTableAccess(true);
final TableAccountSasResourceType resourceTypes = new TableAccountSasResourceType().setObject(true);
final TableSasProtocol protocol = TableSasProtocol.HTTPS_ONLY;
final TableAccountSasSignatureValues sasSignatureValues =
new TableAccountSasSignatureValues(expiryTime, permissions, services, resourceTypes)
.setProtocol(protocol)
.setVersion(TableServiceVersion.V2019_02_02.getVersion());
final String sas = serviceClient.generateAccountSas(sasSignatureValues);
final String tableName = testResourceNamer.randomName("test", 20);
serviceClient.createTable(tableName).block(TIMEOUT);
final TableClientBuilder tableClientBuilder = new TableClientBuilder()
.httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS))
.endpoint(serviceClient.getServiceEndpoint())
.sasToken(sas)
.tableName(tableName);
if (interceptorManager.isPlaybackMode()) {
tableClientBuilder.httpClient(playbackClient);
} else {
tableClientBuilder.httpClient(DEFAULT_HTTP_CLIENT);
if (!interceptorManager.isLiveMode()) {
tableClientBuilder.addPolicy(recordPolicy);
}
tableClientBuilder.addPolicy(new RetryPolicy(new ExponentialBackoff(6, Duration.ofMillis(1500),
Duration.ofSeconds(100))));
}
final TableAsyncClient tableAsyncClient = tableClientBuilder.buildAsyncClient();
final String partitionKeyValue = testResourceNamer.randomName("partitionKey", 20);
final String rowKeyValue = testResourceNamer.randomName("rowKey", 20);
final TableEntity entity = new TableEntity(partitionKeyValue, rowKeyValue);
final int expectedStatusCode = 204;
StepVerifier.create(tableAsyncClient.createEntityWithResponse(entity))
.assertNext(response -> assertEquals(expectedStatusCode, response.getStatusCode()))
.expectComplete()
.verify();
}
@Test
public void setGetProperties() {
Assumptions.assumeFalse(IS_COSMOS_TEST,
"Setting and getting properties is not supported on Cosmos endpoints.");
TableServiceRetentionPolicy retentionPolicy = new TableServiceRetentionPolicy()
.setDaysToRetain(5)
.setEnabled(true);
TableServiceLogging logging = new TableServiceLogging()
.setReadLogged(true)
.setAnalyticsVersion("1.0")
.setRetentionPolicy(retentionPolicy);
List<TableServiceCorsRule> corsRules = new ArrayList<>();
corsRules.add(new TableServiceCorsRule()
.setAllowedMethods("GET,PUT,HEAD")
.setAllowedOrigins("*")
.setAllowedHeaders("x-ms-version")
.setExposedHeaders("x-ms-client-request-id")
.setMaxAgeInSeconds(10));
TableServiceMetrics hourMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceMetrics minuteMetrics = new TableServiceMetrics()
.setEnabled(true)
.setVersion("1.0")
.setRetentionPolicy(retentionPolicy)
.setIncludeApis(true);
TableServiceProperties sentProperties = new TableServiceProperties()
.setLogging(logging)
.setCorsRules(corsRules)
.setMinuteMetrics(minuteMetrics)
.setHourMetrics(hourMetrics);
StepVerifier.create(serviceClient.setPropertiesWithResponse(sentProperties))
.assertNext(response -> {
assertNotNull(response.getHeaders().getValue("x-ms-request-id"));
assertNotNull(response.getHeaders().getValue("x-ms-version"));
})
.expectComplete()
.verify();
sleepIfRunningAgainstService(30000);
StepVerifier.create(serviceClient.getProperties())
.assertNext(retrievedProperties -> assertPropertiesEquals(sentProperties, retrievedProperties))
.expectComplete()
.verify();
}
@Test
public void getStatistics() throws URISyntaxException {
Assumptions.assumeFalse(IS_COSMOS_TEST, "Getting statistics is not supported on Cosmos endpoints.");
URI primaryEndpoint = new URI(serviceClient.getServiceEndpoint());
String[] hostParts = primaryEndpoint.getHost().split("\\.");
StringJoiner secondaryHostJoiner = new StringJoiner(".");
secondaryHostJoiner.add(hostParts[0] + "-secondary");
for (int i = 1; i < hostParts.length; i++) {
secondaryHostJoiner.add(hostParts[i]);
}
String secondaryEndpoint = primaryEndpoint.getScheme() + ":
TableServiceAsyncClient secondaryClient = new TableServiceClientBuilder()
.endpoint(secondaryEndpoint)
.serviceVersion(serviceClient.getServiceVersion())
.pipeline(serviceClient.getHttpPipeline())
.buildAsyncClient();
StepVerifier.create(secondaryClient.getStatistics())
.assertNext(statistics -> {
assertNotNull(statistics);
assertNotNull(statistics.getGeoReplication());
assertNotNull(statistics.getGeoReplication().getStatus());
assertNotNull(statistics.getGeoReplication().getLastSyncTime());
})
.expectComplete()
.verify();
}
} |
Does this need to include the `Concurrent` characteristic as well? | public long estimateSize() {
int currentSpliteratorSize = (items.size() - begin) / step + 1;
if (spliterators == null) {
return currentSpliteratorSize;
}
return currentSpliteratorSize + spliterators.size() * items.size() / step + 1;
} | public long estimateSize() {
return Integer.MAX_VALUE;
} | class ConcurrencyLimitingSpliterator<T> implements Spliterator<T> {
private List<T> items;
private int begin;
private final int step;
private final ConcurrentLinkedQueue<ConcurrencyLimitingSpliterator> spliterators;
public ConcurrencyLimitingSpliterator(List<T> items, int concurrency) {
Objects.requireNonNull(items, "'items' cannot be null.");
if (concurrency == 0) {
throw new IllegalArgumentException("'concurrency' must be a positive number.");
}
this.items = items;
this.step = concurrency;
if (concurrency > 1) {
spliterators = new ConcurrentLinkedQueue<>();
for (int i = 0; i < Math.min(concurrency, items.size()) - 1; i++) {
spliterators.add(new ConcurrencyLimitingSpliterator(items, i, concurrency));
}
} else {
spliterators = null;
}
this.begin = Math.min(concurrency, items.size()) - 1;
}
private ConcurrencyLimitingSpliterator(List<T> items, int begin, int step) {
this.items = items;
this.begin = begin;
this.step = step;
this.spliterators = null;
}
@Override
public boolean tryAdvance(Consumer action) {
if (begin >= items.size()) {
return false;
}
action.accept(items.get(begin));
begin += step;
return true;
}
@Override
public Spliterator trySplit() {
return spliterators != null ? spliterators.poll() : null;
}
@Override
@Override
public int characteristics() {
return NONNULL | ORDERED | SIZED | SUBSIZED;
}
} | class ConcurrencyLimitingSpliterator<T> implements Spliterator<T> {
private final AtomicInteger concurrency;
private final Iterator<T> iterator;
/**
* Creates spliterator.
*
* @param concurrency Number of sub-lists to split items to. When processing items concurrently,
* indicates number of threads to process items with.
*/
ConcurrencyLimitingSpliterator(Iterator<T> iterator, int concurrency) {
Objects.requireNonNull(iterator, "'iterator' cannot be null");
if (concurrency == 0) {
throw new IllegalArgumentException("'concurrency' must be a positive number.");
}
this.concurrency = new AtomicInteger(concurrency);
this.iterator = iterator;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
T request = null;
synchronized (iterator) {
if (iterator.hasNext()) {
request = iterator.next();
}
}
if (request != null) {
action.accept(request);
return true;
}
return false;
}
@Override
public Spliterator<T> trySplit() {
return concurrency.getAndDecrement() > 1 ? new ConcurrencyLimitingSpliterator<>(iterator, 1) : null;
}
@Override
@Override
public int characteristics() {
return NONNULL | ORDERED & ~(Spliterator.SIZED | Spliterator.SUBSIZED);
}
} | |
Concurrent characteristics means that underlying data can be changed concurrently, which is not the case. For parallel execution, we need parallel stream, but spliterator does not really care about execution. | public long estimateSize() {
int currentSpliteratorSize = (items.size() - begin) / step + 1;
if (spliterators == null) {
return currentSpliteratorSize;
}
return currentSpliteratorSize + spliterators.size() * items.size() / step + 1;
} | public long estimateSize() {
return Integer.MAX_VALUE;
} | class ConcurrencyLimitingSpliterator<T> implements Spliterator<T> {
private List<T> items;
private int begin;
private final int step;
private final ConcurrentLinkedQueue<ConcurrencyLimitingSpliterator> spliterators;
public ConcurrencyLimitingSpliterator(List<T> items, int concurrency) {
Objects.requireNonNull(items, "'items' cannot be null.");
if (concurrency == 0) {
throw new IllegalArgumentException("'concurrency' must be a positive number.");
}
this.items = items;
this.step = concurrency;
if (concurrency > 1) {
spliterators = new ConcurrentLinkedQueue<>();
for (int i = 0; i < Math.min(concurrency, items.size()) - 1; i++) {
spliterators.add(new ConcurrencyLimitingSpliterator(items, i, concurrency));
}
} else {
spliterators = null;
}
this.begin = Math.min(concurrency, items.size()) - 1;
}
private ConcurrencyLimitingSpliterator(List<T> items, int begin, int step) {
this.items = items;
this.begin = begin;
this.step = step;
this.spliterators = null;
}
@Override
public boolean tryAdvance(Consumer action) {
if (begin >= items.size()) {
return false;
}
action.accept(items.get(begin));
begin += step;
return true;
}
@Override
public Spliterator trySplit() {
return spliterators != null ? spliterators.poll() : null;
}
@Override
@Override
public int characteristics() {
return NONNULL | ORDERED | SIZED | SUBSIZED;
}
} | class ConcurrencyLimitingSpliterator<T> implements Spliterator<T> {
private final AtomicInteger concurrency;
private final Iterator<T> iterator;
/**
* Creates spliterator.
*
* @param concurrency Number of sub-lists to split items to. When processing items concurrently,
* indicates number of threads to process items with.
*/
ConcurrencyLimitingSpliterator(Iterator<T> iterator, int concurrency) {
Objects.requireNonNull(iterator, "'iterator' cannot be null");
if (concurrency == 0) {
throw new IllegalArgumentException("'concurrency' must be a positive number.");
}
this.concurrency = new AtomicInteger(concurrency);
this.iterator = iterator;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
T request = null;
synchronized (iterator) {
if (iterator.hasNext()) {
request = iterator.next();
}
}
if (request != null) {
action.accept(request);
return true;
}
return false;
}
@Override
public Spliterator<T> trySplit() {
return concurrency.getAndDecrement() > 1 ? new ConcurrencyLimitingSpliterator<>(iterator, 1) : null;
}
@Override
@Override
public int characteristics() {
return NONNULL | ORDERED & ~(Spliterator.SIZED | Spliterator.SUBSIZED);
}
} | |
65 or 60? | public void verifyHttpTimeoutPolicyResponseTimeout() throws Exception {
GlobalEndpointManager endpointManager = Mockito.mock(GlobalEndpointManager.class);
Mockito.doReturn(new URI("http:
Mockito.doReturn(Mono.empty()).when(endpointManager).refreshLocationAsync(Mockito.eq(null), Mockito.eq(true));
RetryContext retryContext = new RetryContext();
WebExceptionRetryPolicy retryPolicy = new WebExceptionRetryPolicy(retryContext);
Exception exception = ReadTimeoutException.INSTANCE;
CosmosException cosmosException = BridgeInternal.createCosmosException(null, HttpConstants.StatusCodes.REQUEST_TIMEOUT, exception);
BridgeInternal.setSubStatusCode(cosmosException, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
RxDocumentServiceRequest dsr;
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.QueryPlan, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofMillis(500));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryContext = new RetryContext();
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
} | assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65)); | public void verifyHttpTimeoutPolicyResponseTimeout() throws Exception {
GlobalEndpointManager endpointManager = Mockito.mock(GlobalEndpointManager.class);
Mockito.doReturn(new URI("http:
Mockito.doReturn(Mono.empty()).when(endpointManager).refreshLocationAsync(Mockito.eq(null), Mockito.eq(true));
Exception exception = ReadTimeoutException.INSTANCE;
CosmosException cosmosException = BridgeInternal.createCosmosException(null, HttpConstants.StatusCodes.REQUEST_TIMEOUT, exception);
BridgeInternal.setSubStatusCode(cosmosException, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
RxDocumentServiceRequest dsr;
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.QueryPlan, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
RetryContext retryContext = new RetryContext();
WebExceptionRetryPolicy retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofMillis(500));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryContext = new RetryContext();
retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.DatabaseAccount);
retryContext = new RetryContext();
retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(20));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
} | class HttpTimeoutPolicyTest {
private final static int TIMEOUT = 10000;
@Test(groups = "unit")
} | class HttpTimeoutPolicyTest {
private final static String BACKOFF_TIME_PROPERTY_NAME = "backoffSecondsTimeout";
@Test(groups = "unit")
public int getBackOffTime(WebExceptionRetryPolicy retryPolicy) throws Exception {
Field field = retryPolicy.getClass().getDeclaredField(BACKOFF_TIME_PROPERTY_NAME);
field.setAccessible(true);
return (int) field.get(retryPolicy);
}
} |
maybe also add the test for shouldRetry + backoff time | public void verifyHttpTimeoutPolicyResponseTimeout() throws Exception {
GlobalEndpointManager endpointManager = Mockito.mock(GlobalEndpointManager.class);
Mockito.doReturn(new URI("http:
Mockito.doReturn(Mono.empty()).when(endpointManager).refreshLocationAsync(Mockito.eq(null), Mockito.eq(true));
RetryContext retryContext = new RetryContext();
WebExceptionRetryPolicy retryPolicy = new WebExceptionRetryPolicy(retryContext);
Exception exception = ReadTimeoutException.INSTANCE;
CosmosException cosmosException = BridgeInternal.createCosmosException(null, HttpConstants.StatusCodes.REQUEST_TIMEOUT, exception);
BridgeInternal.setSubStatusCode(cosmosException, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
RxDocumentServiceRequest dsr;
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.QueryPlan, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofMillis(500));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryContext = new RetryContext();
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(65));
} | retryPolicy.onBeforeSendRequest(dsr); | public void verifyHttpTimeoutPolicyResponseTimeout() throws Exception {
GlobalEndpointManager endpointManager = Mockito.mock(GlobalEndpointManager.class);
Mockito.doReturn(new URI("http:
Mockito.doReturn(Mono.empty()).when(endpointManager).refreshLocationAsync(Mockito.eq(null), Mockito.eq(true));
Exception exception = ReadTimeoutException.INSTANCE;
CosmosException cosmosException = BridgeInternal.createCosmosException(null, HttpConstants.StatusCodes.REQUEST_TIMEOUT, exception);
BridgeInternal.setSubStatusCode(cosmosException, HttpConstants.SubStatusCodes.GATEWAY_ENDPOINT_READ_TIMEOUT);
RxDocumentServiceRequest dsr;
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.QueryPlan, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
RetryContext retryContext = new RetryContext();
WebExceptionRetryPolicy retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofMillis(500));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.Document);
retryContext = new RetryContext();
retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(60));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
dsr = RxDocumentServiceRequest.createFromName(mockDiagnosticsClientContext(),
OperationType.Read, "/dbs/db/colls/col/docs/doc", ResourceType.DatabaseAccount);
retryContext = new RetryContext();
retryPolicy = new WebExceptionRetryPolicy(retryContext);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(5));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(10));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(1);
retryContext.addStatusAndSubStatusCode(408, 10002);
retryPolicy.onBeforeSendRequest(dsr);
assertThat(dsr.getResponseTimeout()).isEqualTo(Duration.ofSeconds(20));
assertThat(getBackOffTime(retryPolicy)).isEqualTo(0);
} | class HttpTimeoutPolicyTest {
private final static int TIMEOUT = 10000;
@Test(groups = "unit")
} | class HttpTimeoutPolicyTest {
private final static String BACKOFF_TIME_PROPERTY_NAME = "backoffSecondsTimeout";
@Test(groups = "unit")
public int getBackOffTime(WebExceptionRetryPolicy retryPolicy) throws Exception {
Field field = retryPolicy.getClass().getDeclaredField(BACKOFF_TIME_PROPERTY_NAME);
field.setAccessible(true);
return (int) field.get(retryPolicy);
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.