comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
why sdk version hardcoded? | public Context getContext() {
if (sdkName == null) {
sdkName = this.getClass().getPackage().getName();
}
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | .addData("Sdk-Version", SDK_VERSION); | public Context getContext() {
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
private static final String SDK_VERSION = "2.0.0-SNAPSHOT";
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private String sdkName;
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure.properties");
private static final String SDK_VERSION;
static {
SDK_VERSION = PROPERTIES.getOrDefault("version", "UnknownVersion");
}
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private final String sdkName;
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
sdkName = this.getClass().getPackage().getName();
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} |
there might be other approach. not investigated yet. | public Context getContext() {
if (sdkName == null) {
sdkName = this.getClass().getPackage().getName();
}
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | .addData("Sdk-Version", SDK_VERSION); | public Context getContext() {
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
private static final String SDK_VERSION = "2.0.0-SNAPSHOT";
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private String sdkName;
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure.properties");
private static final String SDK_VERSION;
static {
SDK_VERSION = PROPERTIES.getOrDefault("version", "UnknownVersion");
}
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private final String sdkName;
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
sdkName = this.getClass().getPackage().getName();
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} |
got it. let's fix it before GA. | public Context getContext() {
if (sdkName == null) {
sdkName = this.getClass().getPackage().getName();
}
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | .addData("Sdk-Version", SDK_VERSION); | public Context getContext() {
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
private static final String SDK_VERSION = "2.0.0-SNAPSHOT";
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private String sdkName;
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure.properties");
private static final String SDK_VERSION;
static {
SDK_VERSION = PROPERTIES.getOrDefault("version", "UnknownVersion");
}
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private final String sdkName;
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
sdkName = this.getClass().getPackage().getName();
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} |
already fixed in this PR. now the flow is: 1. maven package the project, one step it would populate `azure.properties` with its current `project.version`. 2. jar now has this `azure.properties` packaged and released with the jar 3. runtime, code read version from `azure.properties` | public Context getContext() {
if (sdkName == null) {
sdkName = this.getClass().getPackage().getName();
}
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | .addData("Sdk-Version", SDK_VERSION); | public Context getContext() {
return new Context("Sdk-Name", sdkName)
.addData("Sdk-Version", SDK_VERSION);
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
private static final String SDK_VERSION = "2.0.0-SNAPSHOT";
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private String sdkName;
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} | class AzureServiceClient {
private final ClientLogger logger = new ClientLogger(getClass());
private static final Map<String, String> PROPERTIES =
CoreUtils.getProperties("azure.properties");
private static final String SDK_VERSION;
static {
SDK_VERSION = PROPERTIES.getOrDefault("version", "UnknownVersion");
}
private final SerializerAdapter serializerAdapter = new AzureJacksonAdapter();
private final String sdkName;
protected AzureServiceClient(HttpPipeline httpPipeline, AzureEnvironment environment) {
sdkName = this.getClass().getPackage().getName();
((AzureJacksonAdapter) serializerAdapter).serializer().registerModule(DateTimeDeserializer.getModule());
}
/**
* Gets serializer adapter for JSON serialization/de-serialization.
*
* @return the serializer adapter.
*/
public SerializerAdapter getSerializerAdapter() {
return this.serializerAdapter;
}
/**
* Gets default client context.
*
* @return the default client context.
*/
/**
* Merges default client context with provided context.
*
* @param context the context to be merged with default client context.
* @return the merged context.
*/
public Context mergeContext(Context context) {
for (Map.Entry<Object, Object> entry : this.getContext().getValues().entrySet()) {
context = context.addData(entry.getKey(), entry.getValue());
}
return context;
}
/**
* Gets long running operation result.
*
* @param lroInit the raw response of init operation.
* @param httpPipeline the http pipeline.
* @param pollResultType type of poll result.
* @param finalResultType type of final result.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return poller flux for poll result and final result.
*/
public <T, U> PollerFlux<PollResult<T>, U> getLroResultAsync(Mono<Response<Flux<ByteBuffer>>> lroInit,
HttpPipeline httpPipeline,
Type pollResultType, Type finalResultType) {
return PollerFactory.create(
getSerializerAdapter(),
httpPipeline,
pollResultType,
finalResultType,
SdkContext.getLroRetryDuration(),
lroInit
);
}
/**
* Gets the final result, or an error, based on last async poll response.
*
* @param response the last async poll response.
* @param <T> type of poll result.
* @param <U> type of final result.
* @return the final result, or an error.
*/
public <T, U> Mono<U> getLroFinalResultOrError(AsyncPollResponse<PollResult<T>, U> response) {
if (response.getStatus() != LongRunningOperationStatus.SUCCESSFULLY_COMPLETED) {
String errorMessage;
ManagementError managementError = null;
if (response.getValue().getError() != null) {
errorMessage = response.getValue().getError().getMessage();
String errorBody = response.getValue().getError().getResponseBody();
if (errorBody != null) {
try {
managementError = this.getSerializerAdapter().deserialize(
errorBody,
ManagementError.class,
SerializerEncoding.JSON);
if (managementError.getCode() == null || managementError.getMessage() == null) {
managementError = null;
}
} catch (IOException ioe) {
logger.logThrowableAsWarning(ioe);
}
}
} else {
errorMessage = "Long running operation failed.";
}
if (managementError == null) {
managementError = new ManagementError(response.getStatus().toString(), errorMessage);
}
return Mono.error(new ManagementException(errorMessage, null, managementError));
} else {
return response.getFinalResult();
}
}
private static class DateTimeDeserializer extends JsonDeserializer<OffsetDateTime> {
public static SimpleModule getModule() {
SimpleModule module = new SimpleModule();
module.addDeserializer(OffsetDateTime.class, new DateTimeDeserializer());
return module;
}
@Override
public OffsetDateTime deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException, JsonProcessingException {
String string = jsonParser.getText();
TemporalAccessor temporal =
DateTimeFormatter.ISO_DATE_TIME.parseBest(string, OffsetDateTime::from, LocalDateTime::from);
if (temporal.query(TemporalQueries.offset()) == null) {
return LocalDateTime.from(temporal).atOffset(ZoneOffset.UTC);
} else {
return OffsetDateTime.from(temporal);
}
}
}
} |
I think options can be null here, right? | Mono<Response<String>> renewLeaseWithResponse(BlobRenewLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().renewLeaseWithRestResponseAsync(null, null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
requestConditions.getIfMatch(), requestConditions.getIfNoneMatch(), requestConditions.getIfTags(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} else {
return this.client.containers().renewLeaseWithRestResponseAsync(null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
null, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
} | StorageImplUtils.assertNotNull("options", options); | new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().acquireLeaseWithRestResponseAsync(null, null, null,
options.getDuration(), this.leaseId, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} | class BlobLeaseAsyncClient {
private final ClientLogger logger = new ClientLogger(BlobLeaseAsyncClient.class);
private final boolean isBlob;
private final String leaseId;
private final AzureBlobStorageImpl client;
private final String accountName;
BlobLeaseAsyncClient(HttpPipeline pipeline, String url, String leaseId, boolean isBlob, String accountName,
String serviceVersion) {
this.isBlob = isBlob;
this.leaseId = leaseId;
this.client = new AzureBlobStorageBuilder()
.pipeline(pipeline)
.url(url)
.version(serviceVersion)
.build();
this.accountName = accountName;
}
/**
* Gets the {@link URL} of the lease client.
*
* <p>The lease will either be a container or blob URL depending on which the lease client is associated.</p>
*
* @return URL of the lease client.
*/
public String getResourceUrl() {
return this.client.getUrl();
}
/**
* Get the lease ID for this lease.
*
* @return the lease ID.
*/
public String getLeaseId() {
return leaseId;
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLease
*
* @param duration The duration of the lease between 15 to 60 seconds or -1 for an infinite duration.
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> acquireLease(int duration) {
try {
return acquireLeaseWithResponse(duration, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds, or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLeaseWithResponse
*
* @param duration The duration of the lease between 15 to 60 seconds or -1 for an infinite duration.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> acquireLeaseWithResponse(int duration, RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> acquireLeaseWithResponse(new BlobAcquireLeaseOptions(duration)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds, or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLeaseWithResponse
*
* @param options {@link BlobAcquireLeaseOptions}
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> acquireLeaseWithResponse(BlobAcquireLeaseOptions options) {
try {
return withContext(context -> acquireLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> acquireLeaseWithResponse(BlobAcquireLeaseOptions options,
Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? else {
return this.client.containers().acquireLeaseWithRestResponseAsync(null, null,
options.getDuration(), this.leaseId, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null, context)
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLease}
*
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> renewLease() {
try {
return renewLeaseWithResponse((RequestConditions) null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLeaseWithResponse
*
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> renewLeaseWithResponse(RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> renewLeaseWithResponse(new BlobRenewLeaseOptions()
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLeaseWithResponse
*
* @param options {@link BlobRenewLeaseOptions}
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> renewLeaseWithResponse(BlobRenewLeaseOptions options) {
try {
return withContext(context -> renewLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> renewLeaseWithResponse(BlobRenewLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().renewLeaseWithRestResponseAsync(null, null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
requestConditions.getIfMatch(), requestConditions.getIfNoneMatch(), requestConditions.getIfTags(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} else {
return this.client.containers().renewLeaseWithRestResponseAsync(null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
null, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLease}
*
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> releaseLease() {
try {
return releaseLeaseWithResponse((RequestConditions) null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLeaseWithResponse
*
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> releaseLeaseWithResponse(RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> releaseLeaseWithResponse(new BlobReleaseLeaseOptions()
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLeaseWithResponse
*
* @param options {@link BlobReleaseLeaseOptions}
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> releaseLeaseWithResponse(BlobReleaseLeaseOptions options) {
try {
return withContext(context -> releaseLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Void>> releaseLeaseWithResponse(BlobReleaseLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().releaseLeaseWithRestResponseAsync(null, null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
requestConditions.getIfMatch(), requestConditions.getIfNoneMatch(), requestConditions.getIfTags(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
} else {
return this.client.containers().releaseLeaseWithRestResponseAsync(null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
null, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLease}
*
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Integer> breakLease() {
try {
return breakLeaseWithResponse((Integer) null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p>If {@code null} is passed for {@code breakPeriodInSeconds} a fixed duration lease will break after the
* remaining lease period elapses and an infinite lease will break immediately.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLeaseWithResponse
*
* @param breakPeriodInSeconds An optional duration, between 0 and 60 seconds, that the lease should continue before
* it is broken. If the break period is longer than the time remaining on the lease the remaining time on the lease
* is used. A new lease will not be available before the break period has expired, but the lease may be held for
* longer than the break period.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Integer>> breakLeaseWithResponse(Integer breakPeriodInSeconds,
RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> breakLeaseWithResponse(new BlobBreakLeaseOptions()
.setBreakPeriodInSeconds(breakPeriodInSeconds)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p>If {@code null} is passed for {@code breakPeriodInSeconds} a fixed duration lease will break after the
* remaining lease period elapses and an infinite lease will break immediately.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLeaseWithResponse
*
* @param options {@link BlobBreakLeaseOptions}
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Integer>> breakLeaseWithResponse(BlobBreakLeaseOptions options) {
try {
return withContext(context -> breakLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Integer>> breakLeaseWithResponse(BlobBreakLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().breakLeaseWithRestResponseAsync(null, null, null,
options.getBreakPeriodInSeconds(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getIfTags(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseTime()));
} else {
return this.client.containers().breakLeaseWithRestResponseAsync(null, null,
options.getBreakPeriodInSeconds(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null, context)
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseTime()));
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLease
*
* @param proposedId A new lease ID in a valid GUID format.
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> changeLease(String proposedId) {
try {
return changeLeaseWithResponse(proposedId, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLeaseWithResponse
*
* @param proposedId A new lease ID in a valid GUID format.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> changeLeaseWithResponse(String proposedId,
RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> changeLeaseWithResponse(new BlobChangeLeaseOptions(proposedId)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLeaseWithResponse
*
* @param options {@link BlobChangeLeaseOptions}
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> changeLeaseWithResponse(BlobChangeLeaseOptions options) {
try {
return withContext(context -> changeLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> changeLeaseWithResponse(BlobChangeLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().changeLeaseWithRestResponseAsync(null, null, this.leaseId,
options.getProposedId(), null, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getIfTags(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} else {
return this.client.containers().changeLeaseWithRestResponseAsync(null, this.leaseId,
options.getProposedId(), null, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Get associated account name.
*
* @return account name associated with this storage resource.
*/
public String getAccountName() {
return this.accountName;
}
} | class BlobLeaseAsyncClient {
private final ClientLogger logger = new ClientLogger(BlobLeaseAsyncClient.class);
private final boolean isBlob;
private final String leaseId;
private final AzureBlobStorageImpl client;
private final String accountName;
BlobLeaseAsyncClient(HttpPipeline pipeline, String url, String leaseId, boolean isBlob, String accountName,
String serviceVersion) {
this.isBlob = isBlob;
this.leaseId = leaseId;
this.client = new AzureBlobStorageBuilder()
.pipeline(pipeline)
.url(url)
.version(serviceVersion)
.build();
this.accountName = accountName;
}
/**
* Gets the {@link URL} of the lease client.
*
* <p>The lease will either be a container or blob URL depending on which the lease client is associated.</p>
*
* @return URL of the lease client.
*/
public String getResourceUrl() {
return this.client.getUrl();
}
/**
* Get the lease ID for this lease.
*
* @return the lease ID.
*/
public String getLeaseId() {
return leaseId;
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLease
*
* @param duration The duration of the lease between 15 to 60 seconds or -1 for an infinite duration.
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> acquireLease(int duration) {
try {
return acquireLeaseWithResponse(duration, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds, or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLeaseWithResponse
*
* @param duration The duration of the lease between 15 to 60 seconds or -1 for an infinite duration.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> acquireLeaseWithResponse(int duration, RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> acquireLeaseWithResponse(new BlobAcquireLeaseOptions(duration)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Acquires a lease for write and delete operations. The lease duration must be between 15 to 60 seconds, or -1 for
* an infinite duration.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.acquireLeaseWithResponse
*
* @param options {@link BlobAcquireLeaseOptions}
* @return A reactive response containing the lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> acquireLeaseWithResponse(BlobAcquireLeaseOptions options) {
try {
return withContext(context -> acquireLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> acquireLeaseWithResponse(BlobAcquireLeaseOptions options,
Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? else {
return this.client.containers().acquireLeaseWithRestResponseAsync(null, null,
options.getDuration(), this.leaseId, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null, context)
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLease}
*
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> renewLease() {
try {
return renewLeaseWithResponse((RequestConditions) null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLeaseWithResponse
*
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> renewLeaseWithResponse(RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> renewLeaseWithResponse(new BlobRenewLeaseOptions()
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Renews the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.renewLeaseWithResponse
*
* @param options {@link BlobRenewLeaseOptions}
* @return A reactive response containing the renewed lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> renewLeaseWithResponse(BlobRenewLeaseOptions options) {
try {
return withContext(context -> renewLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> renewLeaseWithResponse(BlobRenewLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().renewLeaseWithRestResponseAsync(null, null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
requestConditions.getIfMatch(), requestConditions.getIfNoneMatch(),
requestConditions.getTagsConditions(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} else {
return this.client.containers().renewLeaseWithRestResponseAsync(null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
null, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLease}
*
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> releaseLease() {
try {
return releaseLeaseWithResponse((RequestConditions) null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLeaseWithResponse
*
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> releaseLeaseWithResponse(RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> releaseLeaseWithResponse(new BlobReleaseLeaseOptions()
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Releases the previously acquired lease.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.releaseLeaseWithResponse
*
* @param options {@link BlobReleaseLeaseOptions}
* @return A reactive response signalling completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> releaseLeaseWithResponse(BlobReleaseLeaseOptions options) {
try {
return withContext(context -> releaseLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Void>> releaseLeaseWithResponse(BlobReleaseLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().releaseLeaseWithRestResponseAsync(null, null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
requestConditions.getIfMatch(), requestConditions.getIfNoneMatch(),
requestConditions.getTagsConditions(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
} else {
return this.client.containers().releaseLeaseWithRestResponseAsync(null, this.leaseId, null,
requestConditions.getIfModifiedSince(), requestConditions.getIfUnmodifiedSince(),
null, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLease}
*
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Integer> breakLease() {
try {
return breakLeaseWithResponse((Integer) null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p>If {@code null} is passed for {@code breakPeriodInSeconds} a fixed duration lease will break after the
* remaining lease period elapses and an infinite lease will break immediately.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLeaseWithResponse
*
* @param breakPeriodInSeconds An optional duration, between 0 and 60 seconds, that the lease should continue before
* it is broken. If the break period is longer than the time remaining on the lease the remaining time on the lease
* is used. A new lease will not be available before the break period has expired, but the lease may be held for
* longer than the break period.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Integer>> breakLeaseWithResponse(Integer breakPeriodInSeconds,
RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> breakLeaseWithResponse(new BlobBreakLeaseOptions()
.setBreakPeriodInSeconds(breakPeriodInSeconds)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Breaks the previously acquired lease, if it exists.
*
* <p>If {@code null} is passed for {@code breakPeriodInSeconds} a fixed duration lease will break after the
* remaining lease period elapses and an infinite lease will break immediately.</p>
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.breakLeaseWithResponse
*
* @param options {@link BlobBreakLeaseOptions}
* @return A reactive response containing the remaining time in the broken lease in seconds.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Integer>> breakLeaseWithResponse(BlobBreakLeaseOptions options) {
try {
return withContext(context -> breakLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Integer>> breakLeaseWithResponse(BlobBreakLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().breakLeaseWithRestResponseAsync(null, null, null,
options.getBreakPeriodInSeconds(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseTime()));
} else {
return this.client.containers().breakLeaseWithRestResponseAsync(null, null,
options.getBreakPeriodInSeconds(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null, context)
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseTime()));
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLease
*
* @param proposedId A new lease ID in a valid GUID format.
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<String> changeLease(String proposedId) {
try {
return changeLeaseWithResponse(proposedId, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLeaseWithResponse
*
* @param proposedId A new lease ID in a valid GUID format.
* @param modifiedRequestConditions Standard HTTP Access conditions related to the modification of data. ETag and
* LastModifiedTime are used to construct conditions related to when the resource was changed relative to the given
* request. The request will fail if the specified condition is not satisfied.
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> changeLeaseWithResponse(String proposedId,
RequestConditions modifiedRequestConditions) {
try {
return withContext(context -> changeLeaseWithResponse(new BlobChangeLeaseOptions(proposedId)
.setRequestConditions(ModelHelper.populateBlobLeaseRequestConditions(modifiedRequestConditions)),
context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Changes the lease ID.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.BlobLeaseAsyncClient.changeLeaseWithResponse
*
* @param options {@link BlobChangeLeaseOptions}
* @return A reactive response containing the new lease ID.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<String>> changeLeaseWithResponse(BlobChangeLeaseOptions options) {
try {
return withContext(context -> changeLeaseWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<String>> changeLeaseWithResponse(BlobChangeLeaseOptions options, Context context) {
StorageImplUtils.assertNotNull("options", options);
BlobLeaseRequestConditions requestConditions = (options.getRequestConditions() == null)
? new BlobLeaseRequestConditions() : options.getRequestConditions();
context = context == null ? Context.NONE : context;
if (this.isBlob) {
return this.client.blobs().changeLeaseWithRestResponseAsync(null, null, this.leaseId,
options.getProposedId(), null, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
} else {
return this.client.containers().changeLeaseWithRestResponseAsync(null, this.leaseId,
options.getProposedId(), null, requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), null,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> new SimpleResponse<>(rb, rb.getDeserializedHeaders().getLeaseId()));
}
}
/**
* Get associated account name.
*
* @return account name associated with this storage resource.
*/
public String getAccountName() {
return this.accountName;
}
} |
Should be in `finally`? | private static void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (!parent.exists()) {
parent.mkdirs();
}
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
}
}
connection.disconnect();
} | connection.disconnect(); | private static void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (parent.exists() || parent.mkdirs()) {
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
} else {
throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath());
}
}
} finally {
connection.disconnect();
}
} | class ManageSpringCloud {
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
private static final String SPRING_CLOUD_SERVICE_PRINCIPAL = "03b39d0f-4213-4864-a245-b1476ec03169";
/**
* Main function which runs the actual sample.
* @param azure instance of the azure client
* @param clientId the aad client id in azure instance
* @return true if sample runs successfully
*/
public static boolean runSample(Azure azure, String clientId) {
final String rgName = azure.sdkContext().randomResourceName("rg", 24);
final String serviceName = azure.sdkContext().randomResourceName("service", 24);
final Region region = Region.US_EAST;
final String domainName = azure.sdkContext().randomResourceName("jsdkdemo-", 20) + ".com";
final String certOrderName = azure.sdkContext().randomResourceName("cert", 15);
final String vaultName = azure.sdkContext().randomResourceName("vault", 15);
final String certName = azure.sdkContext().randomResourceName("cert", 15);
try {
azure.resourceGroups().define(rgName)
.withRegion(region)
.create();
System.out.printf("Creating spring cloud service %s in resource group %s ...%n", serviceName, rgName);
SpringService service = azure.springServices().define(serviceName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.create();
System.out.printf("Created spring cloud service %s%n", service.name());
Utils.print(service);
File sourceCodeFolder = new File("piggymetrics");
if (!sourceCodeFolder.exists() || sourceCodeFolder.isFile()) {
if (sourceCodeFolder.isFile() && !sourceCodeFolder.delete()) {
throw new IllegalStateException("could not delete piggymetrics file");
}
extraTarGzSource(sourceCodeFolder, new URL(PIGGYMETRICS_TAR_GZ_URL));
}
System.out.printf("Creating spring cloud app gateway in resource group %s ...%n", rgName);
SpringApp gateway = service.apps().define("gateway")
.withDefaultPublicEndpoint()
.withHttpsOnly()
.deploySource("default", sourceCodeFolder, "gateway")
.create();
System.out.println("Created spring cloud service gateway");
Utils.print(gateway);
System.out.printf("Creating spring cloud app auth-service in resource group %s ...%n", rgName);
SpringApp authService = service.apps().define("auth-service")
.deploySource("default", sourceCodeFolder, "auth-service")
.create();
System.out.println("Created spring cloud service auth-service");
Utils.print(authService);
System.out.printf("Creating spring cloud app account-service in resource group %s ...%n", rgName);
SpringApp accountService = service.apps().define("account-service")
.deploySource("default", sourceCodeFolder, "account-service")
.create();
System.out.println("Created spring cloud service account-service");
Utils.print(accountService);
System.out.println("Purchasing a domain " + domainName + "...");
AppServiceDomain domain = azure.appServiceDomains().define(domainName)
.withExistingResourceGroup(rgName)
.defineRegistrantContact()
.withFirstName("Jon")
.withLastName("Doe")
.withEmail("jondoe@contoso.com")
.withAddressLine1("123 4th Ave")
.withCity("Redmond")
.withStateOrProvince("WA")
.withCountry(CountryIsoCode.UNITED_STATES)
.withPostalCode("98052")
.withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
.withPhoneNumber("4258828080")
.attach()
.withDomainPrivacyEnabled(true)
.withAutoRenewEnabled(false)
.create();
System.out.println("Purchased domain " + domain.name());
Utils.print(domain);
DnsZone dnsZone = azure.dnsZones().getById(domain.dnsZoneId());
gateway.refresh();
System.out.printf("Updating dns with CNAME ssl.%s to %s%n", domainName, gateway.fqdn());
dnsZone.update()
.withCNameRecordSet("ssl", gateway.fqdn())
.apply();
System.out.printf("Purchasing a certificate for *.%s and save to %s in key vault named %s ...%n", domainName, certOrderName, vaultName);
AppServiceCertificateOrder certificateOrder = azure.appServiceCertificateOrders().define(certOrderName)
.withExistingResourceGroup(rgName)
.withHostName(String.format("*.%s", domainName))
.withWildcardSku()
.withDomainVerification(domain)
.withNewKeyVault(vaultName, region)
.withAutoRenew(true)
.create();
System.out.printf("Purchased certificate: *.%s ...%n", domain.name());
Utils.print(certificateOrder);
System.out.printf("Updating key vault %s with access from %s, %s%n", vaultName, clientId, SPRING_CLOUD_SERVICE_PRINCIPAL);
Vault vault = azure.vaults().getByResourceGroup(rgName, vaultName);
vault.update()
.defineAccessPolicy()
.forServicePrincipal(clientId)
.allowSecretAllPermissions()
.allowCertificateAllPermissions()
.attach()
.defineAccessPolicy()
.forServicePrincipal(SPRING_CLOUD_SERVICE_PRINCIPAL)
.allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
.allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
.attach()
.apply();
System.out.printf("Updated key vault %s%n", vault.name());
Utils.print(vault);
Secret secret = vault.secrets().getByName(certOrderName);
byte[] certificate = Base64.getDecoder().decode(secret.value());
String thumbprint = secret.tags().get("Thumbprint");
if (thumbprint == null || thumbprint.isEmpty()) {
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(new ByteArrayInputStream(certificate), null);
String alias = Collections.list(store.aliases()).get(0);
thumbprint = DatatypeConverter.printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
}
System.out.printf("Get certificate: %s%n", secret.value());
System.out.printf("Certificate Thumbprint: %s%n", thumbprint);
CertificateClient certificateClient = new CertificateClientBuilder()
.vaultUrl(vault.vaultUri())
.pipeline(service.manager().httpPipeline())
.buildClient();
System.out.printf("Uploading certificate to %s in key vault ...%n", certName);
certificateClient.importCertificate(
new ImportCertificateOptions(certName, certificate)
.setEnabled(true)
);
System.out.println("Updating Spring Cloud Service with certificate ...");
service.update()
.withCertificate(certName, vault.vaultUri(), certName)
.apply();
System.out.printf("Updating Spring Cloud App with domain ssl.%s ...", domainName);
gateway.update()
.withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
.apply();
System.out.printf("Successfully expose domain ssl.%s", domainName);
return true;
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
} finally {
try {
System.out.println("Delete Resource Group: " + rgName);
azure.resourceGroups().beginDeleteByName(rgName);
} catch (NullPointerException npe) {
System.out.println("Did not create any resources in Azure. No clean up is necessary");
} catch (Exception g) {
g.printStackTrace();
}
}
return false;
}
/**
* Main entry point.
* @param args the parameters
*/
public static void main(String[] args) {
try {
final AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
final TokenCredential credential = new DefaultAzureCredentialBuilder()
.authorityHost(profile.environment().getActiveDirectoryEndpoint())
.build();
Azure azure = Azure
.configure()
.withLogLevel(HttpLogDetailLevel.BASIC)
.authenticate(credential, profile)
.withDefaultSubscription();
System.out.println("Selected subscription: " + azure.subscriptionId());
runSample(azure, Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_CLIENT_ID));
} catch (Exception e) {
System.out.println(e.getMessage());
e.printStackTrace();
}
}
} | class ManageSpringCloud {
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
private static final String SPRING_CLOUD_SERVICE_PRINCIPAL = "03b39d0f-4213-4864-a245-b1476ec03169";
/**
* Main function which runs the actual sample.
* @param azure instance of the azure client
* @param clientId the aad client id in azure instance
* @return true if sample runs successfully
* @throws IllegalStateException unexcepted state
*/
public static boolean runSample(Azure azure, String clientId) {
final String rgName = azure.sdkContext().randomResourceName("rg", 24);
final String serviceName = azure.sdkContext().randomResourceName("service", 24);
final Region region = Region.US_EAST;
final String domainName = azure.sdkContext().randomResourceName("jsdkdemo-", 20) + ".com";
final String certOrderName = azure.sdkContext().randomResourceName("cert", 15);
final String vaultName = azure.sdkContext().randomResourceName("vault", 15);
final String certName = azure.sdkContext().randomResourceName("cert", 15);
try {
azure.resourceGroups().define(rgName)
.withRegion(region)
.create();
System.out.printf("Creating spring cloud service %s in resource group %s ...%n", serviceName, rgName);
SpringService service = azure.springServices().define(serviceName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.create();
System.out.printf("Created spring cloud service %s%n", service.name());
Utils.print(service);
File sourceCodeFolder = new File("piggymetrics");
if (!sourceCodeFolder.exists() || sourceCodeFolder.isFile()) {
if (sourceCodeFolder.isFile() && !sourceCodeFolder.delete()) {
throw new IllegalStateException("could not delete piggymetrics file");
}
extraTarGzSource(sourceCodeFolder, new URL(PIGGYMETRICS_TAR_GZ_URL));
}
System.out.printf("Creating spring cloud app gateway in resource group %s ...%n", rgName);
SpringApp gateway = service.apps().define("gateway")
.withDefaultPublicEndpoint()
.withHttpsOnly()
.deploySource("default", sourceCodeFolder, "gateway")
.create();
System.out.println("Created spring cloud service gateway");
Utils.print(gateway);
System.out.printf("Creating spring cloud app auth-service in resource group %s ...%n", rgName);
SpringApp authService = service.apps().define("auth-service")
.deploySource("default", sourceCodeFolder, "auth-service")
.create();
System.out.println("Created spring cloud service auth-service");
Utils.print(authService);
System.out.printf("Creating spring cloud app account-service in resource group %s ...%n", rgName);
SpringApp accountService = service.apps().define("account-service")
.deploySource("default", sourceCodeFolder, "account-service")
.create();
System.out.println("Created spring cloud service account-service");
Utils.print(accountService);
System.out.println("Purchasing a domain " + domainName + "...");
AppServiceDomain domain = azure.appServiceDomains().define(domainName)
.withExistingResourceGroup(rgName)
.defineRegistrantContact()
.withFirstName("Jon")
.withLastName("Doe")
.withEmail("jondoe@contoso.com")
.withAddressLine1("123 4th Ave")
.withCity("Redmond")
.withStateOrProvince("WA")
.withCountry(CountryIsoCode.UNITED_STATES)
.withPostalCode("98052")
.withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
.withPhoneNumber("4258828080")
.attach()
.withDomainPrivacyEnabled(true)
.withAutoRenewEnabled(false)
.create();
System.out.println("Purchased domain " + domain.name());
Utils.print(domain);
DnsZone dnsZone = azure.dnsZones().getById(domain.dnsZoneId());
gateway.refresh();
System.out.printf("Updating dns with CNAME ssl.%s to %s%n", domainName, gateway.fqdn());
dnsZone.update()
.withCNameRecordSet("ssl", gateway.fqdn())
.apply();
System.out.printf("Purchasing a certificate for *.%s and save to %s in key vault named %s ...%n", domainName, certOrderName, vaultName);
AppServiceCertificateOrder certificateOrder = azure.appServiceCertificateOrders().define(certOrderName)
.withExistingResourceGroup(rgName)
.withHostName(String.format("*.%s", domainName))
.withWildcardSku()
.withDomainVerification(domain)
.withNewKeyVault(vaultName, region)
.withAutoRenew(true)
.create();
System.out.printf("Purchased certificate: *.%s ...%n", domain.name());
Utils.print(certificateOrder);
System.out.printf("Updating key vault %s with access from %s, %s%n", vaultName, clientId, SPRING_CLOUD_SERVICE_PRINCIPAL);
Vault vault = azure.vaults().getByResourceGroup(rgName, vaultName);
vault.update()
.defineAccessPolicy()
.forServicePrincipal(clientId)
.allowSecretAllPermissions()
.allowCertificateAllPermissions()
.attach()
.defineAccessPolicy()
.forServicePrincipal(SPRING_CLOUD_SERVICE_PRINCIPAL)
.allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
.allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
.attach()
.apply();
System.out.printf("Updated key vault %s%n", vault.name());
Utils.print(vault);
Secret secret = vault.secrets().getByName(certOrderName);
byte[] certificate = Base64.getDecoder().decode(secret.value());
String thumbprint = secret.tags().get("Thumbprint");
if (thumbprint == null || thumbprint.isEmpty()) {
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(new ByteArrayInputStream(certificate), null);
String alias = Collections.list(store.aliases()).get(0);
thumbprint = DatatypeConverter.printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
}
System.out.printf("Get certificate: %s%n", secret.value());
System.out.printf("Certificate Thumbprint: %s%n", thumbprint);
CertificateClient certificateClient = new CertificateClientBuilder()
.vaultUrl(vault.vaultUri())
.pipeline(service.manager().httpPipeline())
.buildClient();
System.out.printf("Uploading certificate to %s in key vault ...%n", certName);
certificateClient.importCertificate(
new ImportCertificateOptions(certName, certificate)
.setEnabled(true)
);
System.out.println("Updating Spring Cloud Service with certificate ...");
service.update()
.withCertificate(certName, vault.vaultUri(), certName)
.apply();
System.out.printf("Updating Spring Cloud App with domain ssl.%s ...", domainName);
gateway.update()
.withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
.apply();
System.out.printf("Successfully expose domain ssl.%s", domainName);
return true;
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
} finally {
try {
System.out.println("Delete Resource Group: " + rgName);
azure.resourceGroups().beginDeleteByName(rgName);
} catch (NullPointerException npe) {
System.out.println("Did not create any resources in Azure. No clean up is necessary");
} catch (Exception g) {
g.printStackTrace();
}
}
return false;
}
/**
* Main entry point.
* @param args the parameters
*/
public static void main(String[] args) {
try {
final AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
final TokenCredential credential = new DefaultAzureCredentialBuilder()
.authorityHost(profile.environment().getActiveDirectoryEndpoint())
.build();
Azure azure = Azure
.configure()
.withLogLevel(HttpLogDetailLevel.BASIC)
.authenticate(credential, profile)
.withDefaultSubscription();
System.out.println("Selected subscription: " + azure.subscriptionId());
runSample(azure, Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_CLIENT_ID));
} catch (Exception e) {
System.out.println(e.getMessage());
e.printStackTrace();
}
}
} |
done | private static void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (!parent.exists()) {
parent.mkdirs();
}
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
}
}
connection.disconnect();
} | connection.disconnect(); | private static void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (parent.exists() || parent.mkdirs()) {
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
} else {
throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath());
}
}
} finally {
connection.disconnect();
}
} | class ManageSpringCloud {
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
private static final String SPRING_CLOUD_SERVICE_PRINCIPAL = "03b39d0f-4213-4864-a245-b1476ec03169";
/**
* Main function which runs the actual sample.
* @param azure instance of the azure client
* @param clientId the aad client id in azure instance
* @return true if sample runs successfully
*/
public static boolean runSample(Azure azure, String clientId) {
final String rgName = azure.sdkContext().randomResourceName("rg", 24);
final String serviceName = azure.sdkContext().randomResourceName("service", 24);
final Region region = Region.US_EAST;
final String domainName = azure.sdkContext().randomResourceName("jsdkdemo-", 20) + ".com";
final String certOrderName = azure.sdkContext().randomResourceName("cert", 15);
final String vaultName = azure.sdkContext().randomResourceName("vault", 15);
final String certName = azure.sdkContext().randomResourceName("cert", 15);
try {
azure.resourceGroups().define(rgName)
.withRegion(region)
.create();
System.out.printf("Creating spring cloud service %s in resource group %s ...%n", serviceName, rgName);
SpringService service = azure.springServices().define(serviceName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.create();
System.out.printf("Created spring cloud service %s%n", service.name());
Utils.print(service);
File sourceCodeFolder = new File("piggymetrics");
if (!sourceCodeFolder.exists() || sourceCodeFolder.isFile()) {
if (sourceCodeFolder.isFile() && !sourceCodeFolder.delete()) {
throw new IllegalStateException("could not delete piggymetrics file");
}
extraTarGzSource(sourceCodeFolder, new URL(PIGGYMETRICS_TAR_GZ_URL));
}
System.out.printf("Creating spring cloud app gateway in resource group %s ...%n", rgName);
SpringApp gateway = service.apps().define("gateway")
.withDefaultPublicEndpoint()
.withHttpsOnly()
.deploySource("default", sourceCodeFolder, "gateway")
.create();
System.out.println("Created spring cloud service gateway");
Utils.print(gateway);
System.out.printf("Creating spring cloud app auth-service in resource group %s ...%n", rgName);
SpringApp authService = service.apps().define("auth-service")
.deploySource("default", sourceCodeFolder, "auth-service")
.create();
System.out.println("Created spring cloud service auth-service");
Utils.print(authService);
System.out.printf("Creating spring cloud app account-service in resource group %s ...%n", rgName);
SpringApp accountService = service.apps().define("account-service")
.deploySource("default", sourceCodeFolder, "account-service")
.create();
System.out.println("Created spring cloud service account-service");
Utils.print(accountService);
System.out.println("Purchasing a domain " + domainName + "...");
AppServiceDomain domain = azure.appServiceDomains().define(domainName)
.withExistingResourceGroup(rgName)
.defineRegistrantContact()
.withFirstName("Jon")
.withLastName("Doe")
.withEmail("jondoe@contoso.com")
.withAddressLine1("123 4th Ave")
.withCity("Redmond")
.withStateOrProvince("WA")
.withCountry(CountryIsoCode.UNITED_STATES)
.withPostalCode("98052")
.withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
.withPhoneNumber("4258828080")
.attach()
.withDomainPrivacyEnabled(true)
.withAutoRenewEnabled(false)
.create();
System.out.println("Purchased domain " + domain.name());
Utils.print(domain);
DnsZone dnsZone = azure.dnsZones().getById(domain.dnsZoneId());
gateway.refresh();
System.out.printf("Updating dns with CNAME ssl.%s to %s%n", domainName, gateway.fqdn());
dnsZone.update()
.withCNameRecordSet("ssl", gateway.fqdn())
.apply();
System.out.printf("Purchasing a certificate for *.%s and save to %s in key vault named %s ...%n", domainName, certOrderName, vaultName);
AppServiceCertificateOrder certificateOrder = azure.appServiceCertificateOrders().define(certOrderName)
.withExistingResourceGroup(rgName)
.withHostName(String.format("*.%s", domainName))
.withWildcardSku()
.withDomainVerification(domain)
.withNewKeyVault(vaultName, region)
.withAutoRenew(true)
.create();
System.out.printf("Purchased certificate: *.%s ...%n", domain.name());
Utils.print(certificateOrder);
System.out.printf("Updating key vault %s with access from %s, %s%n", vaultName, clientId, SPRING_CLOUD_SERVICE_PRINCIPAL);
Vault vault = azure.vaults().getByResourceGroup(rgName, vaultName);
vault.update()
.defineAccessPolicy()
.forServicePrincipal(clientId)
.allowSecretAllPermissions()
.allowCertificateAllPermissions()
.attach()
.defineAccessPolicy()
.forServicePrincipal(SPRING_CLOUD_SERVICE_PRINCIPAL)
.allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
.allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
.attach()
.apply();
System.out.printf("Updated key vault %s%n", vault.name());
Utils.print(vault);
Secret secret = vault.secrets().getByName(certOrderName);
byte[] certificate = Base64.getDecoder().decode(secret.value());
String thumbprint = secret.tags().get("Thumbprint");
if (thumbprint == null || thumbprint.isEmpty()) {
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(new ByteArrayInputStream(certificate), null);
String alias = Collections.list(store.aliases()).get(0);
thumbprint = DatatypeConverter.printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
}
System.out.printf("Get certificate: %s%n", secret.value());
System.out.printf("Certificate Thumbprint: %s%n", thumbprint);
CertificateClient certificateClient = new CertificateClientBuilder()
.vaultUrl(vault.vaultUri())
.pipeline(service.manager().httpPipeline())
.buildClient();
System.out.printf("Uploading certificate to %s in key vault ...%n", certName);
certificateClient.importCertificate(
new ImportCertificateOptions(certName, certificate)
.setEnabled(true)
);
System.out.println("Updating Spring Cloud Service with certificate ...");
service.update()
.withCertificate(certName, vault.vaultUri(), certName)
.apply();
System.out.printf("Updating Spring Cloud App with domain ssl.%s ...", domainName);
gateway.update()
.withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
.apply();
System.out.printf("Successfully expose domain ssl.%s", domainName);
return true;
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
} finally {
try {
System.out.println("Delete Resource Group: " + rgName);
azure.resourceGroups().beginDeleteByName(rgName);
} catch (NullPointerException npe) {
System.out.println("Did not create any resources in Azure. No clean up is necessary");
} catch (Exception g) {
g.printStackTrace();
}
}
return false;
}
/**
 * Main entry point.
 * Authenticates with DefaultAzureCredential against the default subscription and runs
 * the sample; the AAD client id is read from the global configuration.
 * @param args the parameters
 */
public static void main(String[] args) {
    try {
        final AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        final TokenCredential credential = new DefaultAzureCredentialBuilder()
            .authorityHost(profile.environment().getActiveDirectoryEndpoint())
            .build();
        Azure azure = Azure
            .configure()
            .withLogLevel(HttpLogDetailLevel.BASIC)
            .authenticate(credential, profile)
            .withDefaultSubscription();
        System.out.println("Selected subscription: " + azure.subscriptionId());
        runSample(azure, Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_CLIENT_ID));
    } catch (Exception e) {
        System.out.println(e.getMessage());
        e.printStackTrace();
    }
}
} | class ManageSpringCloud {
// URL of the PiggyMetrics sample source archive.
// NOTE(review): this literal appears truncated in this dump ("https:" with no
// host/path, likely cut at "//") -- restore the full .tar.gz URL before compiling.
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
// Service principal granted read-only (GET/LIST) access on the key vault below;
// presumably the first-party principal Azure Spring Cloud uses to read the
// certificate -- verify against service documentation.
private static final String SPRING_CLOUD_SERVICE_PRINCIPAL = "03b39d0f-4213-4864-a245-b1476ec03169";
/**
 * Main function which runs the actual sample.
 * Provisions a Spring Cloud service with three apps, buys a domain and a wildcard
 * certificate, wires the certificate through Key Vault, and exposes the gateway app
 * on a custom HTTPS domain. All created resources are deleted in the finally block.
 * @param azure instance of the azure client
 * @param clientId the aad client id in azure instance
 * @return true if sample runs successfully
 * @throws IllegalStateException unexpected state
 */
public static boolean runSample(Azure azure, String clientId) {
    // Randomized resource names so repeated runs do not collide.
    final String rgName = azure.sdkContext().randomResourceName("rg", 24);
    final String serviceName = azure.sdkContext().randomResourceName("service", 24);
    final Region region = Region.US_EAST;
    final String domainName = azure.sdkContext().randomResourceName("jsdkdemo-", 20) + ".com";
    final String certOrderName = azure.sdkContext().randomResourceName("cert", 15);
    final String vaultName = azure.sdkContext().randomResourceName("vault", 15);
    final String certName = azure.sdkContext().randomResourceName("cert", 15);
    try {
        // Resource group holding everything created below.
        azure.resourceGroups().define(rgName)
            .withRegion(region)
            .create();
        System.out.printf("Creating spring cloud service %s in resource group %s ...%n", serviceName, rgName);
        SpringService service = azure.springServices().define(serviceName)
            .withRegion(region)
            .withExistingResourceGroup(rgName)
            .create();
        System.out.printf("Created spring cloud service %s%n", service.name());
        Utils.print(service);
        // Download and unpack the sample source if not already present locally.
        File sourceCodeFolder = new File("piggymetrics");
        if (!sourceCodeFolder.exists() || sourceCodeFolder.isFile()) {
            if (sourceCodeFolder.isFile() && !sourceCodeFolder.delete()) {
                throw new IllegalStateException("could not delete piggymetrics file");
            }
            extraTarGzSource(sourceCodeFolder, new URL(PIGGYMETRICS_TAR_GZ_URL));
        }
        // Gateway app: the only app with a public endpoint, HTTPS only.
        System.out.printf("Creating spring cloud app gateway in resource group %s ...%n", rgName);
        SpringApp gateway = service.apps().define("gateway")
            .withDefaultPublicEndpoint()
            .withHttpsOnly()
            .deploySource("default", sourceCodeFolder, "gateway")
            .create();
        System.out.println("Created spring cloud service gateway");
        Utils.print(gateway);
        System.out.printf("Creating spring cloud app auth-service in resource group %s ...%n", rgName);
        SpringApp authService = service.apps().define("auth-service")
            .deploySource("default", sourceCodeFolder, "auth-service")
            .create();
        System.out.println("Created spring cloud service auth-service");
        Utils.print(authService);
        System.out.printf("Creating spring cloud app account-service in resource group %s ...%n", rgName);
        SpringApp accountService = service.apps().define("account-service")
            .deploySource("default", sourceCodeFolder, "account-service")
            .create();
        System.out.println("Created spring cloud service account-service");
        Utils.print(accountService);
        // Purchase a custom domain; registrant contact details are required by the registrar.
        System.out.println("Purchasing a domain " + domainName + "...");
        AppServiceDomain domain = azure.appServiceDomains().define(domainName)
            .withExistingResourceGroup(rgName)
            .defineRegistrantContact()
                .withFirstName("Jon")
                .withLastName("Doe")
                .withEmail("jondoe@contoso.com")
                .withAddressLine1("123 4th Ave")
                .withCity("Redmond")
                .withStateOrProvince("WA")
                .withCountry(CountryIsoCode.UNITED_STATES)
                .withPostalCode("98052")
                .withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
                .withPhoneNumber("4258828080")
                .attach()
            .withDomainPrivacyEnabled(true)
            .withAutoRenewEnabled(false)
            .create();
        System.out.println("Purchased domain " + domain.name());
        Utils.print(domain);
        // Point ssl.<domain> at the gateway's FQDN via CNAME.
        DnsZone dnsZone = azure.dnsZones().getById(domain.dnsZoneId());
        gateway.refresh();
        System.out.printf("Updating dns with CNAME ssl.%s to %s%n", domainName, gateway.fqdn());
        dnsZone.update()
            .withCNameRecordSet("ssl", gateway.fqdn())
            .apply();
        // Order a wildcard certificate, stored in a newly created key vault.
        System.out.printf("Purchasing a certificate for *.%s and save to %s in key vault named %s ...%n", domainName, certOrderName, vaultName);
        AppServiceCertificateOrder certificateOrder = azure.appServiceCertificateOrders().define(certOrderName)
            .withExistingResourceGroup(rgName)
            .withHostName(String.format("*.%s", domainName))
            .withWildcardSku()
            .withDomainVerification(domain)
            .withNewKeyVault(vaultName, region)
            .withAutoRenew(true)
            .create();
        System.out.printf("Purchased certificate: *.%s ...%n", domain.name());
        Utils.print(certificateOrder);
        // Grant this client full secret/certificate access, and the Spring Cloud
        // principal read-only (GET/LIST) access, on the vault.
        System.out.printf("Updating key vault %s with access from %s, %s%n", vaultName, clientId, SPRING_CLOUD_SERVICE_PRINCIPAL);
        Vault vault = azure.vaults().getByResourceGroup(rgName, vaultName);
        vault.update()
            .defineAccessPolicy()
                .forServicePrincipal(clientId)
                .allowSecretAllPermissions()
                .allowCertificateAllPermissions()
                .attach()
            .defineAccessPolicy()
                .forServicePrincipal(SPRING_CLOUD_SERVICE_PRINCIPAL)
                .allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
                .allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
                .attach()
            .apply();
        System.out.printf("Updated key vault %s%n", vault.name());
        Utils.print(vault);
        // The certificate order stores the certificate as a base64 secret (PKCS#12);
        // if the Thumbprint tag is missing, compute the SHA-1 thumbprint from the
        // certificate bytes themselves.
        Secret secret = vault.secrets().getByName(certOrderName);
        byte[] certificate = Base64.getDecoder().decode(secret.value());
        String thumbprint = secret.tags().get("Thumbprint");
        if (thumbprint == null || thumbprint.isEmpty()) {
            KeyStore store = KeyStore.getInstance("PKCS12");
            store.load(new ByteArrayInputStream(certificate), null);
            String alias = Collections.list(store.aliases()).get(0);
            thumbprint = DatatypeConverter.printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
        }
        System.out.printf("Get certificate: %s%n", secret.value());
        System.out.printf("Certificate Thumbprint: %s%n", thumbprint);
        // Re-import the certificate under certName so the service update below can reference it.
        CertificateClient certificateClient = new CertificateClientBuilder()
            .vaultUrl(vault.vaultUri())
            .pipeline(service.manager().httpPipeline())
            .buildClient();
        System.out.printf("Uploading certificate to %s in key vault ...%n", certName);
        certificateClient.importCertificate(
            new ImportCertificateOptions(certName, certificate)
                .setEnabled(true)
        );
        System.out.println("Updating Spring Cloud Service with certificate ...");
        service.update()
            .withCertificate(certName, vault.vaultUri(), certName)
            .apply();
        System.out.printf("Updating Spring Cloud App with domain ssl.%s ...", domainName);
        gateway.update()
            .withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
            .apply();
        System.out.printf("Successfully expose domain ssl.%s", domainName);
        return true;
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
    } finally {
        // Best-effort cleanup of everything created above.
        try {
            System.out.println("Delete Resource Group: " + rgName);
            azure.resourceGroups().beginDeleteByName(rgName);
        } catch (NullPointerException npe) {
            // NOTE(review): relying on an NPE to detect "nothing was created" is
            // fragile; kept as-is to preserve behavior.
            System.out.println("Did not create any resources in Azure. No clean up is necessary");
        } catch (Exception g) {
            g.printStackTrace();
        }
    }
    return false;
}
/**
 * Main entry point.
 * Authenticates with DefaultAzureCredential against the default subscription and runs
 * the sample; the AAD client id is read from the global configuration.
 * @param args the parameters
 */
public static void main(String[] args) {
    try {
        final AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE);
        final TokenCredential credential = new DefaultAzureCredentialBuilder()
            .authorityHost(profile.environment().getActiveDirectoryEndpoint())
            .build();
        Azure azure = Azure
            .configure()
            .withLogLevel(HttpLogDetailLevel.BASIC)
            .authenticate(credential, profile)
            .withDefaultSubscription();
        System.out.println("Selected subscription: " + azure.subscriptionId());
        runSample(azure, Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_CLIENT_ID));
    } catch (Exception e) {
        System.out.println(e.getMessage());
        e.printStackTrace();
    }
}
} |
Any reason for commenting this out? Looks like the poller tests should not be impacted by this PR. | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | class LROPollerTests {
// Serializer shared by every test and by the stub server's toJson helper.
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
// Poll interval passed to every PollerFactory.create call in this class.
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
    MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
    Mockito.framework().clearInlineMocks();
}
// Minimal RestProxy client: PUT /resource/1 kicks off the long-running operation.
// NOTE(review): the @Host value is truncated in this dump ("http:", likely cut at
// "//") -- restore the full host before compiling. The actual port is rewritten per
// test by the policy in createHttpPipeline.
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
    @Put("/resource/1")
    Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
/**
 * LRO driven purely by the resource's provisioningState: the stub (startServer, default
 * configuration) reports IN_PROGRESS on the first GET and SUCCEEDED on the second, so
 * the poller must emit exactly two responses.
 */
@Test
public void lroBasedOnProvisioningState() {
    WireMockServer lroServer = startServer();
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        // 1st emission: IN_PROGRESS without a resource id; 2nd: completed with an id;
        // any further emission fails the test.
        int[] onNextCallCount = new int[1];
        lroFlux.doOnNext(response -> {
            PollResult<FooWithProvisioningState> pollResult = response.getValue();
            Assertions.assertNotNull(pollResult);
            Assertions.assertNotNull(pollResult.getValue());
            onNextCallCount[0]++;
            if (onNextCallCount[0] == 1) {
                Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
                    response.getStatus());
                Assertions.assertNull(pollResult.getValue().getResourceId());
            } else if (onNextCallCount[0] == 2) {
                Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
                    response.getStatus());
                Assertions.assertNotNull(pollResult.getValue().getResourceId());
            } else {
                throw new IllegalStateException("Poller emitted more than expected value.");
            }
        }).blockLast();
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * LRO driven by the Azure-AsyncOperation header: the PUT returns 201 with the header
 * pointing at /operations/1; the poller polls that endpoint until it reports
 * Succeeded, then fetches the final resource from /resource/1.
 */
@Test
public void lroBasedOnAsyncOperation() {
    ServerConfigure serverConfigure = new ServerConfigure();
    final String resourceEndpoint = "/resource/1";
    final String operationEndpoint = "/operations/1";
    ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
        // GETs seen on the operation endpoint so far.
        private final int[] getCallCount = new int[1];
        @Override
        public com.github.tomakehurst.wiremock.http.Response transform(Request request,
                                                                       com.github.tomakehurst.wiremock.http.Response response,
                                                                       FileSource fileSource,
                                                                       Parameters parameters) {
            if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(500)
                    .body("Unsupported path:" + request.getUrl())
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.PUT)) {
                // Initial PUT: 201 Created plus the Azure-AsyncOperation header.
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .headers(new HttpHeaders(
                        new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
                    .body(toJson(new FooWithProvisioningState("Creating")))
                    .status(201)
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.GET)) {
                if (request.getUrl().endsWith(operationEndpoint)) {
                    getCallCount[0]++;
                    if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
                        return new com.github.tomakehurst.wiremock.http.Response.Builder()
                            .body("{\"status\": \"InProgress\"}")
                            .build();
                    } else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
                        return new com.github.tomakehurst.wiremock.http.Response.Builder()
                            .body("{\"status\": \"Succeeded\"}")
                            .build();
                    }
                } else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
                    // Final GET on the resource once the operation has succeeded.
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
                        .build();
                } else {
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .status(400)
                        .body("Invalid state:" + request.getUrl())
                        .build();
                }
            }
            return response;
        }
        @Override
        public String getName() {
            return "LroService";
        }
    };
    WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
    lroServer.start();
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        int[] onNextCallCount = new int[1];
        AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
            PollResult<FooWithProvisioningState> pollResult = response.getValue();
            Assertions.assertNotNull(pollResult);
            Assertions.assertNotNull(pollResult.getValue());
            onNextCallCount[0]++;
            if (onNextCallCount[0] == 1) {
                Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
                    response.getStatus());
            } else if (onNextCallCount[0] == 2) {
                Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
                    response.getStatus());
            } else {
                throw new IllegalStateException("Poller emitted more than expected value.");
            }
        }).blockLast();
        // Final result must carry the id assigned by the resource endpoint.
        FooWithProvisioningState foo = pollResponse.getFinalResult().block();
        Assertions.assertNotNull(foo.getResourceId());
        Assertions.assertEquals("Succeeded", foo.getProvisioningState());
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * Failure path for a header-driven LRO: the operation endpoint eventually reports
 * "Failed", so the poller's final status must be FAILED (and no final resource GET
 * is made -- the stub answers 400 for any GET on the resource endpoint).
 */
@Test
public void lroBasedOnAsyncOperationFailed() {
    ServerConfigure serverConfigure = new ServerConfigure();
    final String resourceEndpoint = "/resource/1";
    final String operationEndpoint = "/operations/1";
    ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
        // GETs seen on the operation endpoint so far.
        private final int[] getCallCount = new int[1];
        @Override
        public com.github.tomakehurst.wiremock.http.Response transform(Request request,
                                                                       com.github.tomakehurst.wiremock.http.Response response,
                                                                       FileSource fileSource,
                                                                       Parameters parameters) {
            if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(500)
                    .body("Unsupported path:" + request.getUrl())
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.PUT)) {
                // Initial PUT: 201 Created plus the Azure-AsyncOperation header.
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .headers(new HttpHeaders(
                        new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
                    .body(toJson(new FooWithProvisioningState("Creating")))
                    .status(201)
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.GET)) {
                if (request.getUrl().endsWith(operationEndpoint)) {
                    getCallCount[0]++;
                    if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
                        return new com.github.tomakehurst.wiremock.http.Response.Builder()
                            .body("{\"status\": \"InProgress\"}")
                            .build();
                    } else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
                        // Terminal failure signal.
                        return new com.github.tomakehurst.wiremock.http.Response.Builder()
                            .body("{\"status\": \"Failed\"}")
                            .build();
                    }
                } else {
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .status(400)
                        .body("Invalid state:" + request.getUrl())
                        .build();
                }
            }
            return response;
        }
        @Override
        public String getName() {
            return "LroService";
        }
    };
    WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
    lroServer.start();
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        int[] onNextCallCount = new int[1];
        AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
            PollResult<FooWithProvisioningState> pollResult = response.getValue();
            Assertions.assertNotNull(pollResult);
            onNextCallCount[0]++;
            if (onNextCallCount[0] == 1) {
                Assertions.assertNotNull(pollResult.getValue());
                Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
                    response.getStatus());
            } else if (onNextCallCount[0] == 2) {
                Assertions.assertEquals(LongRunningOperationStatus.FAILED,
                    response.getStatus());
            } else {
                throw new IllegalStateException("Poller emitted more than expected value.");
            }
        }).blockLast();
        Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * PUT completes synchronously (200 with provisioningState already "Succeeded"), so
 * the poller must emit a single SUCCESSFULLY_COMPLETED response and never poll.
 */
@Test
public void lroSucceededNoPoll() {
    final String resourceEndpoint = "/resource/1";
    // Sample ARM response for an already-succeeded Key Vault update.
    // NOTE(review): this literal is truncated in this dump (cut mid-JSON, likely at a
    // "//" inside a URL) -- restore the full JSON before compiling; the assertions
    // below expect name "v1weidxu" and type "Microsoft.KeyVault/vaults".
    final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
    ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
        @Override
        public com.github.tomakehurst.wiremock.http.Response transform(Request request,
                                                                       com.github.tomakehurst.wiremock.http.Response response,
                                                                       FileSource fileSource,
                                                                       Parameters parameters) {
            if (!request.getUrl().endsWith(resourceEndpoint)) {
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(500)
                    .body("Unsupported path:" + request.getUrl())
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.PUT)) {
                // Immediate success: no Azure-AsyncOperation header, no polling needed.
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(200)
                    .body(sampleVaultUpdateSucceededResponse)
                    .build();
            }
            return response;
        }
        @Override
        public String getName() {
            return "LroService";
        }
    };
    WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
    lroServer.start();
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<Resource>, Resource> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                Resource.class,
                Resource.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        // Exactly one emission, already completed, carrying the parsed resource.
        StepVerifier.create(lroFlux)
            .expectSubscription()
            .expectNextMatches(response -> {
                PollResult<Resource> pollResult = response.getValue();
                return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
                    && pollResult != null
                    && pollResult.getValue() != null
                    && pollResult.getValue().id() != null;
            }).verifyComplete();
        AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
        Assertions.assertNotNull(asyncPollResponse);
        Resource result = asyncPollResponse.getFinalResult().block();
        Assertions.assertNotNull(result);
        Assertions.assertNotNull(result.id());
        Assertions.assertEquals("v1weidxu", result.name());
        Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * Verifies the poller honors the service's Retry-After header: with 3 polls each gated
 * by a 1-second Retry-After, the LRO must take longer than 3 seconds even though the
 * configured polling interval is only 100ms.
 *
 * Fix: the original carried a duplicated {@code @Test} annotation (a compile error --
 * JUnit 5's {@code @Test} is not {@code @Repeatable}); possibly residue of a removed
 * test between the two. Only one annotation is kept.
 */
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    // 3 polls x Retry-After: 1s each => total elapsed time must exceed 3 seconds.
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * Verifies the Reactor subscriber context ("key1" -> "value1") propagates into the
 * HTTP pipeline: the verification policy fails any request that lacks the entry.
 */
@Test
public void lroContext() {
    WireMockServer lroServer = startServer();
    // Policy that rejects any request whose call context is missing key1=value1.
    HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
        Optional<Object> valueOpt = context.getData("key1");
        if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
            return next.process();
        } else {
            return Mono.error(new AssertionError());
        }
    };
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
            SERIALIZER);
        Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        // Seed the subscriber context; newLroInitFunction forwards it to the call.
        lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
        FooWithProvisioningState result = lroFlux
            .blockLast()
            .getFinalResult()
            .block();
        Assertions.assertNotNull(result);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
// Knobs for the stub LRO service started by startServer.
private static class ServerConfigure {
    // Number of GET polls required before the service reports success.
    private int pollingCountTillSuccess = 2;
    // Extra headers (e.g. Retry-After) attached to in-progress responses.
    private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
/** Starts a stub LRO server with default configuration (succeeds on the 2nd poll). */
private static WireMockServer startServer() {
    return startServer(new ServerConfigure());
}
/**
 * Starts a stub provisioningState-based LRO server: PUT and early GETs return
 * IN_PROGRESS; the configured Nth GET returns SUCCEEDED with a random resource id.
 */
private static WireMockServer startServer(ServerConfigure serverConfigure) {
    final String resourceEndpoint = "/resource/1";
    ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
        private final int[] getCallCount = new int[1];
        @Override
        public com.github.tomakehurst.wiremock.http.Response transform(Request request,
                                                                       com.github.tomakehurst.wiremock.http.Response response,
                                                                       FileSource fileSource,
                                                                       Parameters parameters) {
            if (!request.getUrl().endsWith(resourceEndpoint)) {
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(500)
                    .body("Unsupported path:" + request.getUrl())
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.PUT)) {
                System.out.println(String.format("[%s] PUT status %s",
                    OffsetDateTime.now().toString(), "IN_PROGRESS"));
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .headers(serverConfigure.additionalHeaders)
                    .body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
                    .build();
            }
            if (request.getMethod().isOneOf(RequestMethod.GET)) {
                getCallCount[0]++;
                if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
                    System.out.println(String.format("[%s] GET status %s",
                        OffsetDateTime.now().toString(), "IN_PROGRESS"));
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .headers(serverConfigure.additionalHeaders)
                        .body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
                        .build();
                } else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
                    System.out.println(String.format("[%s] GET status %s",
                        OffsetDateTime.now().toString(), "SUCCEEDED"));
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
                        .build();
                }
            }
            return response;
        }
        @Override
        public String getName() {
            return "LroService";
        }
    };
    WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
    lroServer.start();
    return lroServer;
}
/** Creates (without starting) a WireMock server on a dynamic port stubbing the given endpoints. */
private static WireMockServer createServer(ResponseTransformer transformer,
                                           String... endpoints) {
    WireMockServer server = new WireMockServer(WireMockConfiguration
        .options()
        .dynamicPort()
        .extensions(transformer)
        .disableRequestJournal());
    for (String endpoint : endpoints) {
        server.stubFor(WireMock.any(WireMock.urlEqualTo(endpoint))
            .willReturn(WireMock.aResponse()));
    }
    return server;
}
private static HttpPipeline createHttpPipeline(int port) {
    return createHttpPipeline(port, Collections.emptyList());
}
/**
 * Builds a pipeline whose last policy rewrites every request URL to the WireMock
 * server's dynamic port (the client interface's @Host is fixed at compile time).
 */
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
    policies.add(new HttpPipelinePolicy() {
        @Override
        public Mono<HttpResponse> process(HttpPipelineCallContext context,
                                          HttpPipelineNextPolicy next) {
            HttpRequest request = context.getHttpRequest();
            request.setUrl(updatePort(request.getUrl(), port));
            context.setHttpRequest(request);
            return next.process();
        }
        private URL updatePort(URL url, int port) {
            try {
                return new URL(url.getProtocol(), url.getHost(), port, url.getFile());
            } catch (MalformedURLException mue) {
                throw new RuntimeException(mue);
            }
        }
    });
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
/** Kicks off the LRO, forwarding the Reactor subscriber context into the call's Context. */
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
    return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
/** Serializes an object to JSON, wrapping IOException as unchecked. */
private static String toJson(Object object) {
    try {
        return SERIALIZER.serialize(object, SerializerEncoding.JSON);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
} | class LROPollerTests {
// NOTE(review): the lines below duplicate an earlier copy of this class's members in
// this dump (fields, fixtures, client interface, and the provisioningState test).
// Serializer shared by every test and by the stub server's toJson helper.
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
// Poll interval passed to every PollerFactory.create call in this class.
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
    MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
    Mockito.framework().clearInlineMocks();
}
// NOTE(review): the @Host value is truncated in this dump ("http:") -- restore the
// full host before compiling.
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
    @Put("/resource/1")
    Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
/**
 * LRO driven purely by the resource's provisioningState: the stub reports IN_PROGRESS
 * on the first GET and SUCCEEDED on the second, so exactly two responses are emitted.
 */
@Test
public void lroBasedOnProvisioningState() {
    WireMockServer lroServer = startServer();
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        int[] onNextCallCount = new int[1];
        lroFlux.doOnNext(response -> {
            PollResult<FooWithProvisioningState> pollResult = response.getValue();
            Assertions.assertNotNull(pollResult);
            Assertions.assertNotNull(pollResult.getValue());
            onNextCallCount[0]++;
            if (onNextCallCount[0] == 1) {
                Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
                    response.getStatus());
                Assertions.assertNull(pollResult.getValue().getResourceId());
            } else if (onNextCallCount[0] == 2) {
                Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
                    response.getStatus());
                Assertions.assertNotNull(pollResult.getValue().getResourceId());
            } else {
                throw new IllegalStateException("Poller emitted more than expected value.");
            }
        }).blockLast();
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * Verifies the Azure-AsyncOperation polling strategy: the PUT returns 201 with an
 * Azure-AsyncOperation header pointing at /operations/1; polling that endpoint yields
 * InProgress then Succeeded; the final GET on the resource endpoint returns the
 * completed resource, whose id and provisioning state are asserted at the end.
 */
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
// Stateful WireMock transformer scripting the whole LRO conversation.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
// Counts GETs on the operation endpoint to decide when to report success.
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
// Reject any path the scenario does not know about.
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
// Initial create: 201 + Azure-AsyncOperation header pointing at the operation URL.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
// Final GET on the resource after the operation succeeded.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
// NOTE(review): blockLast() returns null if the flux completes empty, which would
// NPE on pollResponse.getFinalResult() below — acceptable in a test, but worth noting.
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
/**
 * Failure path of the Azure-AsyncOperation strategy: after the PUT, polling the
 * operation endpoint yields InProgress then Failed; the final poll response status
 * is asserted to be FAILED.
 */
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
// Stateful transformer: same shape as the success case, but the terminal operation
// status is "Failed" and no resource GET is ever served.
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
// 201 + Azure-AsyncOperation header steering the poller at /operations/1.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
// Terminal failure from the operation endpoint.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
// Last emission must carry the terminal FAILED status.
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
/**
 * Verifies the synchronous-success path: the PUT immediately returns 200 with a
 * fully-provisioned resource payload, so the poller must emit exactly one
 * SUCCESSFULLY_COMPLETED response without performing any poll round-trips.
 */
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
// NOTE(review): the string literal below is truncated mid-value ("/subscriptions/"
// with no closing quote) — extraction damage in this copy of the file. The original
// presumably held a full sample Key Vault response (name "v1weidxu",
// type "Microsoft.KeyVault/vaults", per the assertions below). Restore from the repo.
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
// Immediate 200: the LRO is already complete at creation time.
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
// Exactly one emission, already terminal, with a populated resource id.
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
/**
 * Verifies the poller honors the service's {@code Retry-After: 1} header: the server
 * requires three polls before reporting success, so with a 1-second server-mandated
 * interval the total polling time must exceed 3 seconds (rather than the much shorter
 * default {@code POLLING_DURATION}).
 *
 * Fix: the original carried a duplicated {@code @Test} annotation, which does not
 * compile — JUnit 5's {@code @Test} is not {@code @Repeatable}.
 */
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        // The 1-second Retry-After waits dominate, so elapsed time must strictly exceed 3s.
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
/**
 * Verifies that a Reactor subscriber-context entry ("key1" -> "value1") reaches the
 * HTTP pipeline: a pipeline policy errors out any request whose Context lacks the
 * expected entry, so the LRO can only complete if context propagation works.
 */
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
// Policy that fails the request unless the propagated context carries key1=value1.
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
// Declared as Flux (not PollerFlux) so it can be reassigned after adding the context.
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
// NOTE(review): subscriberContext is deprecated in Reactor 3.4+ in favor of
// contextWrite — confirm the Reactor version before migrating.
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Mutable knobs for the scripted mock LRO server (fields are set directly by tests).
private static class ServerConfigure {
// Number of GET polls the server answers with IN_PROGRESS before reporting success.
private int pollingCountTillSuccess = 2;
// Extra headers (e.g. Retry-After) attached to the PUT and in-progress GET responses.
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
// Starts a mock LRO server with default settings (success on the 2nd poll, no headers).
private static WireMockServer startServer() {
return startServer(new ServerConfigure());
}
/**
 * Starts a WireMock server scripting a provisioning-state LRO on /resource/1:
 * PUT returns IN_PROGRESS; GETs return IN_PROGRESS until the configured poll count
 * is reached, then SUCCEEDED with a fresh resource id. The caller owns shutdown.
 */
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
// Mutable GET counter shared across requests; the transformer is stateful by design.
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
// Terminal poll: SUCCEEDED plus a freshly generated resource id.
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
/**
 * Builds (without starting) a WireMock server on a dynamic port with the supplied
 * transformer installed and a catch-all stub registered for each endpoint, so every
 * request to those paths flows through the transformer.
 */
private static WireMockServer createServer(ResponseTransformer transformer,
                                           String... endpoints) {
    WireMockConfiguration config = WireMockConfiguration.options()
        .dynamicPort()
        .extensions(transformer)
        .disableRequestJournal();
    WireMockServer server = new WireMockServer(config);
    for (String endpoint : endpoints) {
        server.stubFor(
            WireMock.any(WireMock.urlEqualTo(endpoint)).willReturn(WireMock.aResponse()));
    }
    return server;
}
// Convenience overload: pipeline targeting the given local port with no extra policies.
private static HttpPipeline createHttpPipeline(int port) {
return createHttpPipeline(port, Collections.emptyList());
}
/**
 * Builds an HttpPipeline that, after running any caller-supplied policies, rewrites
 * each outgoing request's URL to target the WireMock server's dynamically assigned
 * local port (protocol, host and path are preserved).
 */
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> allPolicies = new ArrayList<>(additionalPolicies);
    // Port-rewriting policy, expressed as a lambda like the other policies in this file.
    allPolicies.add((context, next) -> {
        HttpRequest request = context.getHttpRequest();
        URL original = request.getUrl();
        URL redirected;
        try {
            redirected = new URL(original.getProtocol(), original.getHost(), port, original.getFile());
        } catch (MalformedURLException mue) {
            throw new RuntimeException(mue);
        }
        request.setUrl(redirected);
        context.setHttpRequest(request);
        return next.process();
    });
    return new HttpPipelineBuilder()
        .policies(allPolicies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
// Wraps the client's PUT in fluxContext so the Reactor subscriber context is converted
// into the azure-core Context passed to startLro, then takes the single init response.
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
// Serializes the object to JSON via the shared adapter, rethrowing any
// IOException unchecked (cause preserved) since tests cannot recover from it.
private static String toJson(Object object) {
try {
return SERIALIZER.serialize(object, SerializerEncoding.JSON);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
} | |
These have been fixed and merged in after the commit this branch was checked out from. Cherry picked to make it so CI fails less. | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | public void lroTimeout() {
final Duration timeoutDuration = Duration.ofMillis(1000);
final String resourceEndpoint = "/resource/1";
final AtomicInteger getCallCount = new AtomicInteger(0);
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT, RequestMethod.GET)) {
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount.getAndIncrement();
}
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
Mono<FooWithProvisioningState> resultMonoWithTimeout = lroFlux.last()
.flatMap(AsyncPollResponse::getFinalResult)
.timeout(timeoutDuration);
StepVerifier.create(resultMonoWithTimeout)
.thenAwait()
.verifyError(TimeoutException.class);
int count = getCallCount.get();
try {
Thread.sleep(timeoutDuration.toMillis());
} catch (InterruptedException e) {
}
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
} | class LROPollerTests {
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
@Test
public void lroRetryAfter() {
ServerConfigure configure = new ServerConfigure();
Duration expectedPollingDuration = Duration.ofSeconds(3);
configure.pollingCountTillSuccess = 3;
configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
WireMockServer lroServer = startServer(configure);
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
long nanoTime = System.nanoTime();
FooWithProvisioningState result = lroFlux
.doOnNext(response -> {
System.out.println(String.format("[%s] status %s",
OffsetDateTime.now().toString(), response.getStatus().toString()));
}).blockLast()
.getFinalResult().block();
Assertions.assertNotNull(result);
Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
// Mutable knobs for the mock LRO server used by these tests.
private static class ServerConfigure {
    // Number of GET polls the server requires before reporting SUCCEEDED.
    private int pollingCountTillSuccess = 2;
    // Extra headers (e.g. Retry-After) stamped on PUT and in-progress GET responses.
    private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
// Starts a mock LRO server with the default configuration (success on the 2nd poll, no extra headers).
private static WireMockServer startServer() {
    return startServer(new ServerConfigure());
}
// Starts a WireMock server that simulates a provisioning-state based LRO on /resource/1:
// PUT answers IN_PROGRESS, subsequent GET polls answer IN_PROGRESS until
// serverConfigure.pollingCountTillSuccess polls have happened, then SUCCEEDED.
private static WireMockServer startServer(ServerConfigure serverConfigure) {
    final String resourceEndpoint = "/resource/1";
    ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
        // Single-element array used as a mutable counter captured by the anonymous class.
        private final int[] getCallCount = new int[1];
        @Override
        public com.github.tomakehurst.wiremock.http.Response transform(Request request,
            com.github.tomakehurst.wiremock.http.Response response,
            FileSource fileSource,
            Parameters parameters) {
            // Reject anything that is not the single resource endpoint under test.
            if (!request.getUrl().endsWith(resourceEndpoint)) {
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .status(500)
                    .body("Unsupported path:" + request.getUrl())
                    .build();
            }
            // Activation call: always report IN_PROGRESS (with configured extra headers).
            if (request.getMethod().isOneOf(RequestMethod.PUT)) {
                System.out.println(String.format("[%s] PUT status %s",
                    OffsetDateTime.now().toString(), "IN_PROGRESS"));
                return new com.github.tomakehurst.wiremock.http.Response.Builder()
                    .headers(serverConfigure.additionalHeaders)
                    .body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
                    .build();
            }
            // Poll call: IN_PROGRESS until the configured count is reached, then SUCCEEDED.
            if (request.getMethod().isOneOf(RequestMethod.GET)) {
                getCallCount[0]++;
                if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
                    System.out.println(String.format("[%s] GET status %s",
                        OffsetDateTime.now().toString(), "IN_PROGRESS"));
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .headers(serverConfigure.additionalHeaders)
                        .body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
                        .build();
                } else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
                    System.out.println(String.format("[%s] GET status %s",
                        OffsetDateTime.now().toString(), "SUCCEEDED"));
                    return new com.github.tomakehurst.wiremock.http.Response.Builder()
                        .body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
                        .build();
                }
            }
            // NOTE(review): GET polls past the success count fall through to the stubbed
            // (empty) default response — presumably tests never poll past success; confirm.
            return response;
        }
        @Override
        public String getName() {
            return "LroService";
        }
    };
    WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
    lroServer.start();
    return lroServer;
}
/**
 * Creates (but does not start) a WireMock server on a dynamic port, wired with the given
 * response transformer and a catch-all stub for each listed endpoint.
 */
private static WireMockServer createServer(ResponseTransformer transformer,
                                           String... endpoints) {
    WireMockConfiguration options = WireMockConfiguration
        .options()
        .dynamicPort()
        .extensions(transformer)
        .disableRequestJournal();
    WireMockServer mockServer = new WireMockServer(options);
    for (String path : endpoints) {
        // Empty stub; the transformer supplies the actual response content.
        mockServer.stubFor(WireMock.any(WireMock.urlEqualTo(path))
            .willReturn(WireMock.aResponse()));
    }
    return mockServer;
}
// Builds an HTTP pipeline that redirects requests to the given port, with no extra policies.
private static HttpPipeline createHttpPipeline(int port) {
    return createHttpPipeline(port, Collections.emptyList());
}
/**
 * Builds an HTTP pipeline containing the supplied policies followed by a port-rewriting
 * policy that redirects every outgoing request to the mock server's dynamic port.
 */
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
    List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
    policies.add((context, next) -> {
        HttpRequest request = context.getHttpRequest();
        URL original = request.getUrl();
        try {
            // Keep protocol/host/path, swap only the port.
            request.setUrl(new URL(original.getProtocol(), original.getHost(), port, original.getFile()));
        } catch (MalformedURLException mue) {
            throw new RuntimeException(mue);
        }
        context.setHttpRequest(request);
        return next.process();
    });
    return new HttpPipelineBuilder()
        .policies(policies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
// Produces the LRO activation call as a Mono, threading the subscriber context
// into the service call via FluxUtil.fluxContext.
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
    return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
// Serializes the given object to JSON with the shared adapter, rethrowing any
// IOException unchecked so callers (lambdas in the mock server) stay simple.
private static String toJson(Object object) {
    try {
        return SERIALIZER.serialize(object, SerializerEncoding.JSON);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
} | class LROPollerTests {
private static final SerializerAdapter SERIALIZER = new AzureJacksonAdapter();
private static final Duration POLLING_DURATION = Duration.ofMillis(100);
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Host("http:
@ServiceInterface(name = "ProvisioningStateLroService")
interface ProvisioningStateLroServiceClient {
@Put("/resource/1")
Mono<Response<Flux<ByteBuffer>>> startLro(Context context);
}
@Test
public void lroBasedOnProvisioningState() {
WireMockServer lroServer = startServer();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
Assertions.assertNull(pollResult.getValue().getResourceId());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
Assertions.assertNotNull(pollResult.getValue().getResourceId());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperation() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Succeeded\"}")
.build();
}
} else if (request.getUrl().endsWith(resourceEndpoint) && getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("Succeeded", UUID.randomUUID().toString())))
.build();
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
Assertions.assertNotNull(pollResult.getValue());
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
FooWithProvisioningState foo = pollResponse.getFinalResult().block();
Assertions.assertNotNull(foo.getResourceId());
Assertions.assertEquals("Succeeded", foo.getProvisioningState());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroBasedOnAsyncOperationFailed() {
ServerConfigure serverConfigure = new ServerConfigure();
final String resourceEndpoint = "/resource/1";
final String operationEndpoint = "/operations/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint) && !request.getUrl().endsWith(operationEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(new HttpHeaders(
new HttpHeader("Azure-AsyncOperation", request.getAbsoluteUrl().replace(resourceEndpoint, operationEndpoint))))
.body(toJson(new FooWithProvisioningState("Creating")))
.status(201)
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
if (request.getUrl().endsWith(operationEndpoint)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"InProgress\"}")
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body("{\"status\": \"Failed\"}")
.build();
}
} else {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(400)
.body("Invalid state:" + request.getUrl())
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint, operationEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
int[] onNextCallCount = new int[1];
AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState> pollResponse = lroFlux.doOnNext(response -> {
PollResult<FooWithProvisioningState> pollResult = response.getValue();
Assertions.assertNotNull(pollResult);
onNextCallCount[0]++;
if (onNextCallCount[0] == 1) {
Assertions.assertNotNull(pollResult.getValue());
Assertions.assertEquals(LongRunningOperationStatus.IN_PROGRESS,
response.getStatus());
} else if (onNextCallCount[0] == 2) {
Assertions.assertEquals(LongRunningOperationStatus.FAILED,
response.getStatus());
} else {
throw new IllegalStateException("Poller emitted more than expected value.");
}
}).blockLast();
Assertions.assertEquals(LongRunningOperationStatus.FAILED, pollResponse.getStatus());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
@Test
public void lroSucceededNoPoll() {
final String resourceEndpoint = "/resource/1";
final String sampleVaultUpdateSucceededResponse = "{\"id\":\"/subscriptions/
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(200)
.body(sampleVaultUpdateSucceededResponse)
.build();
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port()),
SERIALIZER);
PollerFlux<PollResult<Resource>, Resource> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
Resource.class,
Resource.class,
POLLING_DURATION,
newLroInitFunction(client));
StepVerifier.create(lroFlux)
.expectSubscription()
.expectNextMatches(response -> {
PollResult<Resource> pollResult = response.getValue();
return response.getStatus() == LongRunningOperationStatus.SUCCESSFULLY_COMPLETED
&& pollResult != null
&& pollResult.getValue() != null
&& pollResult.getValue().id() != null;
}).verifyComplete();
AsyncPollResponse<PollResult<Resource>, Resource> asyncPollResponse = lroFlux.blockLast();
Assertions.assertNotNull(asyncPollResponse);
Resource result = asyncPollResponse.getFinalResult().block();
Assertions.assertNotNull(result);
Assertions.assertNotNull(result.id());
Assertions.assertEquals("v1weidxu", result.name());
Assertions.assertEquals("Microsoft.KeyVault/vaults", result.type());
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
/**
 * Verifies that the poller honors the server-supplied {@code Retry-After: 1} header rather
 * than the (much shorter) default {@code POLLING_DURATION}: with three polls required, the
 * total polling time must exceed the three-second floor.
 *
 * <p>Fix: the {@code @Test} annotation was duplicated on consecutive lines; {@code @Test}
 * is not a repeatable annotation, so the duplicate is a compile error and is removed.</p>
 */
@Test
public void lroRetryAfter() {
    ServerConfigure configure = new ServerConfigure();
    Duration expectedPollingDuration = Duration.ofSeconds(3);
    configure.pollingCountTillSuccess = 3;
    configure.additionalHeaders = new HttpHeaders(new HttpHeader("Retry-After", "1"));
    WireMockServer lroServer = startServer(configure);
    try {
        final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
            createHttpPipeline(lroServer.port()),
            SERIALIZER);
        PollerFlux<PollResult<FooWithProvisioningState>, FooWithProvisioningState> lroFlux
            = PollerFactory.create(SERIALIZER,
                new HttpPipelineBuilder().build(),
                FooWithProvisioningState.class,
                FooWithProvisioningState.class,
                POLLING_DURATION,
                newLroInitFunction(client));
        long nanoTime = System.nanoTime();
        FooWithProvisioningState result = lroFlux
            .doOnNext(response -> {
                System.out.println(String.format("[%s] status %s",
                    OffsetDateTime.now().toString(), response.getStatus().toString()));
            }).blockLast()
            .getFinalResult().block();
        Assertions.assertNotNull(result);
        Duration pollingDuration = Duration.ofNanos(System.nanoTime() - nanoTime);
        // Three polls at >= 1s Retry-After each must take longer than the 3s expectation.
        Assertions.assertTrue(pollingDuration.compareTo(expectedPollingDuration) > 0);
    } finally {
        if (lroServer.isRunning()) {
            lroServer.shutdown();
        }
    }
}
@Test
public void lroContext() {
WireMockServer lroServer = startServer();
HttpPipelinePolicy contextVerifyPolicy = (context, next) -> {
Optional<Object> valueOpt = context.getData("key1");
if (valueOpt.isPresent() && "value1".equals(valueOpt.get())) {
return next.process();
} else {
return Mono.error(new AssertionError());
}
};
try {
final ProvisioningStateLroServiceClient client = RestProxy.create(ProvisioningStateLroServiceClient.class,
createHttpPipeline(lroServer.port(), Collections.singletonList(contextVerifyPolicy)),
SERIALIZER);
Flux<AsyncPollResponse<PollResult<FooWithProvisioningState>, FooWithProvisioningState>> lroFlux
= PollerFactory.create(SERIALIZER,
new HttpPipelineBuilder().build(),
FooWithProvisioningState.class,
FooWithProvisioningState.class,
POLLING_DURATION,
newLroInitFunction(client));
lroFlux = lroFlux.subscriberContext(context -> context.put("key1", "value1"));
FooWithProvisioningState result = lroFlux
.blockLast()
.getFinalResult()
.block();
Assertions.assertNotNull(result);
} finally {
if (lroServer.isRunning()) {
lroServer.shutdown();
}
}
}
private static class ServerConfigure {
private int pollingCountTillSuccess = 2;
private HttpHeaders additionalHeaders = HttpHeaders.noHeaders();
}
private static WireMockServer startServer() {
return startServer(new ServerConfigure());
}
private static WireMockServer startServer(ServerConfigure serverConfigure) {
final String resourceEndpoint = "/resource/1";
ResponseTransformer provisioningStateLroService = new ResponseTransformer() {
private final int[] getCallCount = new int[1];
@Override
public com.github.tomakehurst.wiremock.http.Response transform(Request request,
com.github.tomakehurst.wiremock.http.Response response,
FileSource fileSource,
Parameters parameters) {
if (!request.getUrl().endsWith(resourceEndpoint)) {
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.status(500)
.body("Unsupported path:" + request.getUrl())
.build();
}
if (request.getMethod().isOneOf(RequestMethod.PUT)) {
System.out.println(String.format("[%s] PUT status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
}
if (request.getMethod().isOneOf(RequestMethod.GET)) {
getCallCount[0]++;
if (getCallCount[0] < serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "IN_PROGRESS"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.headers(serverConfigure.additionalHeaders)
.body(toJson(new FooWithProvisioningState("IN_PROGRESS")))
.build();
} else if (getCallCount[0] == serverConfigure.pollingCountTillSuccess) {
System.out.println(String.format("[%s] GET status %s",
OffsetDateTime.now().toString(), "SUCCEEDED"));
return new com.github.tomakehurst.wiremock.http.Response.Builder()
.body(toJson(new FooWithProvisioningState("SUCCEEDED", UUID.randomUUID().toString())))
.build();
}
}
return response;
}
@Override
public String getName() {
return "LroService";
}
};
WireMockServer lroServer = createServer(provisioningStateLroService, resourceEndpoint);
lroServer.start();
return lroServer;
}
private static WireMockServer createServer(ResponseTransformer transformer,
String... endpoints) {
WireMockServer server = new WireMockServer(WireMockConfiguration
.options()
.dynamicPort()
.extensions(transformer)
.disableRequestJournal());
for (String endpoint : endpoints) {
server.stubFor(WireMock.any(WireMock.urlEqualTo(endpoint))
.willReturn(WireMock.aResponse()));
}
return server;
}
private static HttpPipeline createHttpPipeline(int port) {
return createHttpPipeline(port, Collections.emptyList());
}
private static HttpPipeline createHttpPipeline(int port, List<HttpPipelinePolicy> additionalPolicies) {
List<HttpPipelinePolicy> policies = new ArrayList<>(additionalPolicies);
policies.add(new HttpPipelinePolicy() {
@Override
public Mono<HttpResponse> process(HttpPipelineCallContext context,
HttpPipelineNextPolicy next) {
HttpRequest request = context.getHttpRequest();
request.setUrl(updatePort(request.getUrl(), port));
context.setHttpRequest(request);
return next.process();
}
private URL updatePort(URL url, int port) {
try {
return new URL(url.getProtocol(), url.getHost(), port, url.getFile());
} catch (MalformedURLException mue) {
throw new RuntimeException(mue);
}
}
});
return new HttpPipelineBuilder()
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
}
private Mono<Response<Flux<ByteBuffer>>> newLroInitFunction(ProvisioningStateLroServiceClient client) {
return FluxUtil.fluxContext(context -> client.startLro(context).flux()).next();
}
private static String toJson(Object object) {
try {
return SERIALIZER.serialize(object, SerializerEncoding.JSON);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
} | |
This should be an NPE rather than IllegalArgument. https://azure.github.io/azure-sdk/java_implementation.html#java-errors-system-errors | boolean tryAdd(final EventData eventData) {
if (eventData == null) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("eventData cannot be null"));
}
EventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;
final int size;
try {
size = getSize(event, events.isEmpty());
} catch (BufferOverflowException exception) {
throw logger.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024),
contextProvider.getErrorContext()));
}
synchronized (lock) {
if (this.sizeInBytes + size > this.maxMessageSize) {
return false;
}
this.sizeInBytes += size;
}
this.events.add(event);
return true;
} | throw logger.logExceptionAsWarning(new IllegalArgumentException("eventData cannot be null")); | boolean tryAdd(final EventData eventData) {
if (eventData == null) {
throw logger.logExceptionAsWarning(new NullPointerException("eventData cannot be null"));
}
EventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;
final int size;
try {
size = getSize(event, events.isEmpty());
} catch (BufferOverflowException exception) {
throw logger.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024),
contextProvider.getErrorContext()));
}
synchronized (lock) {
if (this.sizeInBytes + size > this.maxMessageSize) {
return false;
}
this.sizeInBytes += size;
}
this.events.add(event);
return true;
} | class EventDataBatchBase {
private final ClientLogger logger = new ClientLogger(this.getClass());
private final Object lock = new Object();
private final int maxMessageSize;
private final String partitionKey;
private final ErrorContextProvider contextProvider;
private final List<EventData> events;
private final byte[] eventBytes;
private final String partitionId;
private int sizeInBytes;
private final TracerProvider tracerProvider;
private final String entityPath;
private final String hostname;
EventDataBatchBase(int maxMessageSize, String partitionId, String partitionKey,
ErrorContextProvider contextProvider, TracerProvider tracerProvider, String entityPath,
String hostname) {
this.maxMessageSize = maxMessageSize;
this.partitionKey = partitionKey;
this.partitionId = partitionId;
this.contextProvider = contextProvider;
this.events = new LinkedList<>();
this.sizeInBytes = (maxMessageSize / 65536) * 1024;
this.eventBytes = new byte[maxMessageSize];
this.tracerProvider = tracerProvider;
this.entityPath = entityPath;
this.hostname = hostname;
}
/**
* Gets the number of {@link EventData events} in the batch.
*
* @return The number of {@link EventData events} in the batch.
*/
public int getCount() {
return events.size();
}
/**
* Gets the maximum size, in bytes, of the {@link EventDataBatch}.
*
* @return The maximum size, in bytes, of the {@link EventDataBatch}.
*/
public int getMaxSizeInBytes() {
return maxMessageSize;
}
/**
* Gets the size of the {@link EventDataBatch} in bytes.
*
* @return the size of the {@link EventDataBatch} in bytes.
*/
public int getSizeInBytes() {
return this.sizeInBytes;
}
/**
* Tries to add an {@link EventData event} to the batch.
*
* @param eventData The {@link EventData} to add to the batch.
* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in
* the batch.
* @throws IllegalArgumentException if {@code eventData} is {@code null}.
* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.
*/
/**
* Method to start and end a "Azure.EventHubs.message" span and add the "DiagnosticId" as a property of the message.
*
* @param eventData The Event to add tracing span for.
* @return the updated event data object.
*/
EventData traceMessageSpan(EventData eventData) {
Optional<Object> eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);
if (eventContextData.isPresent()) {
return eventData;
} else {
Context eventContext = eventData.getContext()
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(ENTITY_PATH_KEY, this.entityPath)
.addData(HOST_NAME_KEY, this.hostname);
Context eventSpanContext = tracerProvider.startSpan(eventContext, ProcessKind.MESSAGE);
Optional<Object> eventDiagnosticIdOptional = eventSpanContext.getData(DIAGNOSTIC_ID_KEY);
if (eventDiagnosticIdOptional.isPresent()) {
eventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());
tracerProvider.endSpan(eventSpanContext, Signal.complete());
eventData.addContext(SPAN_CONTEXT_KEY, eventSpanContext);
}
}
return eventData;
}
List<EventData> getEvents() {
return events;
}
String getPartitionKey() {
return partitionKey;
}
String getPartitionId() {
return partitionId;
}
int getSize(final EventData eventData, final boolean isFirst) {
Objects.requireNonNull(eventData, "'eventData' cannot be null.");
final Message amqpMessage = createAmqpMessage(eventData, partitionKey);
int eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
eventSize += 16;
if (isFirst) {
amqpMessage.setBody(null);
amqpMessage.setApplicationProperties(null);
amqpMessage.setProperties(null);
amqpMessage.setDeliveryAnnotations(null);
eventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
}
return eventSize;
}
/*
* Creates the AMQP message represented by the event data
*/
private Message createAmqpMessage(EventData event, String partitionKey) {
final Message message = Proton.message();
if (event.getProperties() != null && !event.getProperties().isEmpty()) {
final ApplicationProperties applicationProperties = new ApplicationProperties(event.getProperties());
message.setApplicationProperties(applicationProperties);
}
if (event.getSystemProperties() != null) {
event.getSystemProperties().forEach((key, value) -> {
if (EventData.RESERVED_SYSTEM_PROPERTIES.contains(key)) {
return;
}
final AmqpMessageConstant constant = AmqpMessageConstant.fromString(key);
if (constant != null) {
switch (constant) {
case MESSAGE_ID:
message.setMessageId(value);
break;
case USER_ID:
message.setUserId((byte[]) value);
break;
case TO:
message.setAddress((String) value);
break;
case SUBJECT:
message.setSubject((String) value);
break;
case REPLY_TO:
message.setReplyTo((String) value);
break;
case CORRELATION_ID:
message.setCorrelationId(value);
break;
case CONTENT_TYPE:
message.setContentType((String) value);
break;
case CONTENT_ENCODING:
message.setContentEncoding((String) value);
break;
case ABSOLUTE_EXPIRY_TIME:
message.setExpiryTime((long) value);
break;
case CREATION_TIME:
message.setCreationTime((long) value);
break;
case GROUP_ID:
message.setGroupId((String) value);
break;
case GROUP_SEQUENCE:
message.setGroupSequence((long) value);
break;
case REPLY_TO_GROUP_ID:
message.setReplyToGroupId((String) value);
break;
default:
throw logger.logExceptionAsWarning(new IllegalArgumentException(String.format(Locale.US,
"Property is not a recognized reserved property name: %s", key)));
}
} else {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(Symbol.getSymbol(key), value);
message.setMessageAnnotations(messageAnnotations);
}
});
}
if (partitionKey != null) {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
message.setMessageAnnotations(messageAnnotations);
}
message.setBody(new Data(new Binary(event.getBody())));
return message;
}
} | class EventDataBatchBase {
private final ClientLogger logger = new ClientLogger(this.getClass());
private final Object lock = new Object();
private final int maxMessageSize;
private final String partitionKey;
private final ErrorContextProvider contextProvider;
private final List<EventData> events;
private final byte[] eventBytes;
private final String partitionId;
private int sizeInBytes;
private final TracerProvider tracerProvider;
private final String entityPath;
private final String hostname;
EventDataBatchBase(int maxMessageSize, String partitionId, String partitionKey,
ErrorContextProvider contextProvider, TracerProvider tracerProvider, String entityPath,
String hostname) {
this.maxMessageSize = maxMessageSize;
this.partitionKey = partitionKey;
this.partitionId = partitionId;
this.contextProvider = contextProvider;
this.events = new LinkedList<>();
this.sizeInBytes = (maxMessageSize / 65536) * 1024;
this.eventBytes = new byte[maxMessageSize];
this.tracerProvider = tracerProvider;
this.entityPath = entityPath;
this.hostname = hostname;
}
/**
* Gets the number of {@link EventData events} in the batch.
*
* @return The number of {@link EventData events} in the batch.
*/
public int getCount() {
return events.size();
}
/**
* Gets the maximum size, in bytes, of the {@link EventDataBatch}.
*
* @return The maximum size, in bytes, of the {@link EventDataBatch}.
*/
public int getMaxSizeInBytes() {
return maxMessageSize;
}
/**
* Gets the size of the {@link EventDataBatch} in bytes.
*
* @return the size of the {@link EventDataBatch} in bytes.
*/
public int getSizeInBytes() {
return this.sizeInBytes;
}
/**
* Tries to add an {@link EventData event} to the batch.
*
* @param eventData The {@link EventData} to add to the batch.
* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in
* the batch.
* @throws NullPointerException if {@code eventData} is {@code null}.
* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.
*/
/**
* Method to start and end a "Azure.EventHubs.message" span and add the "DiagnosticId" as a property of the message.
*
* @param eventData The Event to add tracing span for.
* @return the updated event data object.
*/
EventData traceMessageSpan(EventData eventData) {
Optional<Object> eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);
if (eventContextData.isPresent()) {
return eventData;
} else {
Context eventContext = eventData.getContext()
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(ENTITY_PATH_KEY, this.entityPath)
.addData(HOST_NAME_KEY, this.hostname);
Context eventSpanContext = tracerProvider.startSpan(eventContext, ProcessKind.MESSAGE);
Optional<Object> eventDiagnosticIdOptional = eventSpanContext.getData(DIAGNOSTIC_ID_KEY);
if (eventDiagnosticIdOptional.isPresent()) {
eventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());
tracerProvider.endSpan(eventSpanContext, Signal.complete());
eventData.addContext(SPAN_CONTEXT_KEY, eventSpanContext);
}
}
return eventData;
}
List<EventData> getEvents() {
return events;
}
String getPartitionKey() {
return partitionKey;
}
String getPartitionId() {
return partitionId;
}
int getSize(final EventData eventData, final boolean isFirst) {
    Objects.requireNonNull(eventData, "'eventData' cannot be null.");
    final Message amqpMessage = createAmqpMessage(eventData, partitionKey);
    // Serialize the event once to measure its encoded AMQP size; throws BufferOverflowException
    // (handled by the caller) when a single event exceeds maxMessageSize.
    int eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
    // Fixed per-event overhead inside the batch envelope — magic constant carried over from the
    // original client; presumably the AMQP data-section framing cost. TODO confirm.
    eventSize += 16;
    if (isFirst) {
        // The first event also pays for the batch message's own envelope: strip the per-event
        // sections and re-encode to measure only the remaining header/annotation overhead.
        amqpMessage.setBody(null);
        amqpMessage.setApplicationProperties(null);
        amqpMessage.setProperties(null);
        amqpMessage.setDeliveryAnnotations(null);
        eventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
    }
    return eventSize;
}
/*
 * Creates the AMQP message represented by the event data.
 *
 * Mapping: application properties -> AMQP application-properties section; recognized system
 * properties -> first-class AMQP properties fields; unrecognized system properties and the
 * partition key -> message-annotations; event body -> a single AMQP data section.
 */
private Message createAmqpMessage(EventData event, String partitionKey) {
    final Message message = Proton.message();
    if (event.getProperties() != null && !event.getProperties().isEmpty()) {
        final ApplicationProperties applicationProperties = new ApplicationProperties(event.getProperties());
        message.setApplicationProperties(applicationProperties);
    }
    if (event.getSystemProperties() != null) {
        event.getSystemProperties().forEach((key, value) -> {
            // Reserved properties are owned by the client/service and must not be echoed back.
            if (EventData.RESERVED_SYSTEM_PROPERTIES.contains(key)) {
                return;
            }
            final AmqpMessageConstant constant = AmqpMessageConstant.fromString(key);
            if (constant != null) {
                // Known constants map onto the dedicated fields of the AMQP properties section.
                switch (constant) {
                    case MESSAGE_ID:
                        message.setMessageId(value);
                        break;
                    case USER_ID:
                        message.setUserId((byte[]) value);
                        break;
                    case TO:
                        message.setAddress((String) value);
                        break;
                    case SUBJECT:
                        message.setSubject((String) value);
                        break;
                    case REPLY_TO:
                        message.setReplyTo((String) value);
                        break;
                    case CORRELATION_ID:
                        message.setCorrelationId(value);
                        break;
                    case CONTENT_TYPE:
                        message.setContentType((String) value);
                        break;
                    case CONTENT_ENCODING:
                        message.setContentEncoding((String) value);
                        break;
                    case ABSOLUTE_EXPIRY_TIME:
                        message.setExpiryTime((long) value);
                        break;
                    case CREATION_TIME:
                        message.setCreationTime((long) value);
                        break;
                    case GROUP_ID:
                        message.setGroupId((String) value);
                        break;
                    case GROUP_SEQUENCE:
                        message.setGroupSequence((long) value);
                        break;
                    case REPLY_TO_GROUP_ID:
                        message.setReplyToGroupId((String) value);
                        break;
                    default:
                        // A constant parsed by fromString but not handled above is a programming error.
                        throw logger.logExceptionAsWarning(new IllegalArgumentException(String.format(Locale.US,
                            "Property is not a recognized reserved property name: %s", key)));
                }
            } else {
                // Unrecognized system properties travel as message annotations.
                final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
                    ? new MessageAnnotations(new HashMap<>())
                    : message.getMessageAnnotations();
                messageAnnotations.getValue().put(Symbol.getSymbol(key), value);
                message.setMessageAnnotations(messageAnnotations);
            }
        });
    }
    if (partitionKey != null) {
        // The partition key rides in a service-defined message annotation.
        final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
            ? new MessageAnnotations(new HashMap<>())
            : message.getMessageAnnotations();
        messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
        message.setMessageAnnotations(messageAnnotations);
    }
    message.setBody(new Data(new Binary(event.getBody())));
    return message;
}
} |
fixed, thanks Connie | boolean tryAdd(final EventData eventData) {
if (eventData == null) {
throw logger.logExceptionAsWarning(new IllegalArgumentException("eventData cannot be null"));
}
EventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;
final int size;
try {
size = getSize(event, events.isEmpty());
} catch (BufferOverflowException exception) {
throw logger.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024),
contextProvider.getErrorContext()));
}
synchronized (lock) {
if (this.sizeInBytes + size > this.maxMessageSize) {
return false;
}
this.sizeInBytes += size;
}
this.events.add(event);
return true;
} | throw logger.logExceptionAsWarning(new IllegalArgumentException("eventData cannot be null")); | boolean tryAdd(final EventData eventData) {
if (eventData == null) {
throw logger.logExceptionAsWarning(new NullPointerException("eventData cannot be null"));
}
EventData event = tracerProvider.isEnabled() ? traceMessageSpan(eventData) : eventData;
final int size;
try {
size = getSize(event, events.isEmpty());
} catch (BufferOverflowException exception) {
throw logger.logExceptionAsWarning(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024),
contextProvider.getErrorContext()));
}
synchronized (lock) {
if (this.sizeInBytes + size > this.maxMessageSize) {
return false;
}
this.sizeInBytes += size;
}
this.events.add(event);
return true;
} | class EventDataBatchBase {
private final ClientLogger logger = new ClientLogger(this.getClass());
private final Object lock = new Object();
private final int maxMessageSize;
private final String partitionKey;
private final ErrorContextProvider contextProvider;
private final List<EventData> events;
private final byte[] eventBytes;
private final String partitionId;
private int sizeInBytes;
private final TracerProvider tracerProvider;
private final String entityPath;
private final String hostname;
EventDataBatchBase(int maxMessageSize, String partitionId, String partitionKey,
ErrorContextProvider contextProvider, TracerProvider tracerProvider, String entityPath,
String hostname) {
this.maxMessageSize = maxMessageSize;
this.partitionKey = partitionKey;
this.partitionId = partitionId;
this.contextProvider = contextProvider;
this.events = new LinkedList<>();
this.sizeInBytes = (maxMessageSize / 65536) * 1024;
this.eventBytes = new byte[maxMessageSize];
this.tracerProvider = tracerProvider;
this.entityPath = entityPath;
this.hostname = hostname;
}
/**
* Gets the number of {@link EventData events} in the batch.
*
* @return The number of {@link EventData events} in the batch.
*/
public int getCount() {
return events.size();
}
/**
* Gets the maximum size, in bytes, of the {@link EventDataBatch}.
*
* @return The maximum size, in bytes, of the {@link EventDataBatch}.
*/
public int getMaxSizeInBytes() {
return maxMessageSize;
}
/**
* Gets the size of the {@link EventDataBatch} in bytes.
*
* @return the size of the {@link EventDataBatch} in bytes.
*/
public int getSizeInBytes() {
return this.sizeInBytes;
}
/**
* Tries to add an {@link EventData event} to the batch.
*
* @param eventData The {@link EventData} to add to the batch.
* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in
* the batch.
* @throws IllegalArgumentException if {@code eventData} is {@code null}.
* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.
*/
/**
* Method to start and end a "Azure.EventHubs.message" span and add the "DiagnosticId" as a property of the message.
*
* @param eventData The Event to add tracing span for.
* @return the updated event data object.
*/
EventData traceMessageSpan(EventData eventData) {
Optional<Object> eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);
if (eventContextData.isPresent()) {
return eventData;
} else {
Context eventContext = eventData.getContext()
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(ENTITY_PATH_KEY, this.entityPath)
.addData(HOST_NAME_KEY, this.hostname);
Context eventSpanContext = tracerProvider.startSpan(eventContext, ProcessKind.MESSAGE);
Optional<Object> eventDiagnosticIdOptional = eventSpanContext.getData(DIAGNOSTIC_ID_KEY);
if (eventDiagnosticIdOptional.isPresent()) {
eventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());
tracerProvider.endSpan(eventSpanContext, Signal.complete());
eventData.addContext(SPAN_CONTEXT_KEY, eventSpanContext);
}
}
return eventData;
}
List<EventData> getEvents() {
return events;
}
String getPartitionKey() {
return partitionKey;
}
String getPartitionId() {
return partitionId;
}
int getSize(final EventData eventData, final boolean isFirst) {
Objects.requireNonNull(eventData, "'eventData' cannot be null.");
final Message amqpMessage = createAmqpMessage(eventData, partitionKey);
int eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
eventSize += 16;
if (isFirst) {
amqpMessage.setBody(null);
amqpMessage.setApplicationProperties(null);
amqpMessage.setProperties(null);
amqpMessage.setDeliveryAnnotations(null);
eventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
}
return eventSize;
}
/*
* Creates the AMQP message represented by the event data
*/
private Message createAmqpMessage(EventData event, String partitionKey) {
final Message message = Proton.message();
if (event.getProperties() != null && !event.getProperties().isEmpty()) {
final ApplicationProperties applicationProperties = new ApplicationProperties(event.getProperties());
message.setApplicationProperties(applicationProperties);
}
if (event.getSystemProperties() != null) {
event.getSystemProperties().forEach((key, value) -> {
if (EventData.RESERVED_SYSTEM_PROPERTIES.contains(key)) {
return;
}
final AmqpMessageConstant constant = AmqpMessageConstant.fromString(key);
if (constant != null) {
switch (constant) {
case MESSAGE_ID:
message.setMessageId(value);
break;
case USER_ID:
message.setUserId((byte[]) value);
break;
case TO:
message.setAddress((String) value);
break;
case SUBJECT:
message.setSubject((String) value);
break;
case REPLY_TO:
message.setReplyTo((String) value);
break;
case CORRELATION_ID:
message.setCorrelationId(value);
break;
case CONTENT_TYPE:
message.setContentType((String) value);
break;
case CONTENT_ENCODING:
message.setContentEncoding((String) value);
break;
case ABSOLUTE_EXPIRY_TIME:
message.setExpiryTime((long) value);
break;
case CREATION_TIME:
message.setCreationTime((long) value);
break;
case GROUP_ID:
message.setGroupId((String) value);
break;
case GROUP_SEQUENCE:
message.setGroupSequence((long) value);
break;
case REPLY_TO_GROUP_ID:
message.setReplyToGroupId((String) value);
break;
default:
throw logger.logExceptionAsWarning(new IllegalArgumentException(String.format(Locale.US,
"Property is not a recognized reserved property name: %s", key)));
}
} else {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(Symbol.getSymbol(key), value);
message.setMessageAnnotations(messageAnnotations);
}
});
}
if (partitionKey != null) {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
message.setMessageAnnotations(messageAnnotations);
}
message.setBody(new Data(new Binary(event.getBody())));
return message;
}
} | class EventDataBatchBase {
private final ClientLogger logger = new ClientLogger(this.getClass());
private final Object lock = new Object();
private final int maxMessageSize;
private final String partitionKey;
private final ErrorContextProvider contextProvider;
private final List<EventData> events;
private final byte[] eventBytes;
private final String partitionId;
private int sizeInBytes;
private final TracerProvider tracerProvider;
private final String entityPath;
private final String hostname;
EventDataBatchBase(int maxMessageSize, String partitionId, String partitionKey,
ErrorContextProvider contextProvider, TracerProvider tracerProvider, String entityPath,
String hostname) {
this.maxMessageSize = maxMessageSize;
this.partitionKey = partitionKey;
this.partitionId = partitionId;
this.contextProvider = contextProvider;
this.events = new LinkedList<>();
this.sizeInBytes = (maxMessageSize / 65536) * 1024;
this.eventBytes = new byte[maxMessageSize];
this.tracerProvider = tracerProvider;
this.entityPath = entityPath;
this.hostname = hostname;
}
/**
* Gets the number of {@link EventData events} in the batch.
*
* @return The number of {@link EventData events} in the batch.
*/
public int getCount() {
return events.size();
}
/**
* Gets the maximum size, in bytes, of the {@link EventDataBatch}.
*
* @return The maximum size, in bytes, of the {@link EventDataBatch}.
*/
public int getMaxSizeInBytes() {
return maxMessageSize;
}
/**
* Gets the size of the {@link EventDataBatch} in bytes.
*
* @return the size of the {@link EventDataBatch} in bytes.
*/
public int getSizeInBytes() {
return this.sizeInBytes;
}
/**
* Tries to add an {@link EventData event} to the batch.
*
* @param eventData The {@link EventData} to add to the batch.
* @return {@code true} if the event could be added to the batch; {@code false} if the event was too large to fit in
* the batch.
* @throws IllegalArgumentException if {@code eventData} is {@code null}.
* @throws AmqpException if {@code eventData} is larger than the maximum size of the {@link EventDataBatch}.
*/
/**
* Method to start and end a "Azure.EventHubs.message" span and add the "DiagnosticId" as a property of the message.
*
* @param eventData The Event to add tracing span for.
* @return the updated event data object.
*/
EventData traceMessageSpan(EventData eventData) {
Optional<Object> eventContextData = eventData.getContext().getData(SPAN_CONTEXT_KEY);
if (eventContextData.isPresent()) {
return eventData;
} else {
Context eventContext = eventData.getContext()
.addData(AZ_TRACING_NAMESPACE_KEY, AZ_NAMESPACE_VALUE)
.addData(ENTITY_PATH_KEY, this.entityPath)
.addData(HOST_NAME_KEY, this.hostname);
Context eventSpanContext = tracerProvider.startSpan(eventContext, ProcessKind.MESSAGE);
Optional<Object> eventDiagnosticIdOptional = eventSpanContext.getData(DIAGNOSTIC_ID_KEY);
if (eventDiagnosticIdOptional.isPresent()) {
eventData.getProperties().put(DIAGNOSTIC_ID_KEY, eventDiagnosticIdOptional.get().toString());
tracerProvider.endSpan(eventSpanContext, Signal.complete());
eventData.addContext(SPAN_CONTEXT_KEY, eventSpanContext);
}
}
return eventData;
}
List<EventData> getEvents() {
return events;
}
String getPartitionKey() {
return partitionKey;
}
String getPartitionId() {
return partitionId;
}
int getSize(final EventData eventData, final boolean isFirst) {
Objects.requireNonNull(eventData, "'eventData' cannot be null.");
final Message amqpMessage = createAmqpMessage(eventData, partitionKey);
int eventSize = amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
eventSize += 16;
if (isFirst) {
amqpMessage.setBody(null);
amqpMessage.setApplicationProperties(null);
amqpMessage.setProperties(null);
amqpMessage.setDeliveryAnnotations(null);
eventSize += amqpMessage.encode(this.eventBytes, 0, maxMessageSize);
}
return eventSize;
}
/*
* Creates the AMQP message represented by the event data
*/
private Message createAmqpMessage(EventData event, String partitionKey) {
final Message message = Proton.message();
if (event.getProperties() != null && !event.getProperties().isEmpty()) {
final ApplicationProperties applicationProperties = new ApplicationProperties(event.getProperties());
message.setApplicationProperties(applicationProperties);
}
if (event.getSystemProperties() != null) {
event.getSystemProperties().forEach((key, value) -> {
if (EventData.RESERVED_SYSTEM_PROPERTIES.contains(key)) {
return;
}
final AmqpMessageConstant constant = AmqpMessageConstant.fromString(key);
if (constant != null) {
switch (constant) {
case MESSAGE_ID:
message.setMessageId(value);
break;
case USER_ID:
message.setUserId((byte[]) value);
break;
case TO:
message.setAddress((String) value);
break;
case SUBJECT:
message.setSubject((String) value);
break;
case REPLY_TO:
message.setReplyTo((String) value);
break;
case CORRELATION_ID:
message.setCorrelationId(value);
break;
case CONTENT_TYPE:
message.setContentType((String) value);
break;
case CONTENT_ENCODING:
message.setContentEncoding((String) value);
break;
case ABSOLUTE_EXPIRY_TIME:
message.setExpiryTime((long) value);
break;
case CREATION_TIME:
message.setCreationTime((long) value);
break;
case GROUP_ID:
message.setGroupId((String) value);
break;
case GROUP_SEQUENCE:
message.setGroupSequence((long) value);
break;
case REPLY_TO_GROUP_ID:
message.setReplyToGroupId((String) value);
break;
default:
throw logger.logExceptionAsWarning(new IllegalArgumentException(String.format(Locale.US,
"Property is not a recognized reserved property name: %s", key)));
}
} else {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(Symbol.getSymbol(key), value);
message.setMessageAnnotations(messageAnnotations);
}
});
}
if (partitionKey != null) {
final MessageAnnotations messageAnnotations = (message.getMessageAnnotations() == null)
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
message.setMessageAnnotations(messageAnnotations);
}
message.setBody(new Data(new Binary(event.getBody())));
return message;
}
} |
Track1 does this ``` private Observable<VirtualMachineInner> retrieveVirtualMachineAsync() { return this.computeManager .inner() .virtualMachines() .getByResourceGroupAsync(rgName, vmName, InstanceViewTypes.INSTANCE_VIEW) .flatMap(new Func1<VirtualMachineInner, Observable<VirtualMachineInner>>() { @Override public Observable<VirtualMachineInner> call(VirtualMachineInner virtualMachine) { if (virtualMachine == null) { return Observable.error(new Exception(String.format("VM with name '%s' not found (resource group '%s')", vmName, rgName))); } return Observable.just(virtualMachine); } }); } ``` Basically it convert null to Exception. Since we already has Exception here, no need for this code. --- I've added `// Exception if vm not found` comment in these locations, in case we need to switch back the GET 404 response to `Mono.empty()` without `ManagementException`. | private Mono<VirtualMachineInner> retrieveVirtualMachineAsync() {
return this
.computeManager
.inner()
.getVirtualMachines()
.getByResourceGroupAsync(rgName, vmName);
} | } | private Mono<VirtualMachineInner> retrieveVirtualMachineAsync() {
return this
.computeManager
.inner()
.getVirtualMachines()
.getByResourceGroupAsync(rgName, vmName);
} | class WindowsVolumeNoAADEncryptionMonitorImpl implements DiskVolumeEncryptionMonitor {
private final String rgName;
private final String vmName;
private final ComputeManager computeManager;
private VirtualMachineInner virtualMachine;
WindowsVolumeNoAADEncryptionMonitorImpl(String virtualMachineId, ComputeManager computeManager) {
this.rgName = ResourceUtils.groupFromResourceId(virtualMachineId);
this.vmName = ResourceUtils.nameFromResourceId(virtualMachineId);
this.computeManager = computeManager;
}
@Override
public OperatingSystemTypes osType() {
    // This monitor implementation is specific to Windows VMs.
    return OperatingSystemTypes.WINDOWS;
}

@Override
public String progressMessage() {
    // Human-readable summary of the OS-disk and data-disk encryption states.
    return String.format("OSDisk: %s DataDisk: %s", osDiskStatus(), dataDiskStatus());
}
@Override
public EncryptionStatus osDiskStatus() {
    // Without a cached instance view there is nothing to inspect; refresh() populates it.
    if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
        return EncryptionStatus.UNKNOWN;
    }
    // The first disk carrying encryptionSettings is treated as the OS disk; report the first
    // "EncryptionState/..." status found on it. Other disks are never considered (break below).
    for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
        if (diskInstanceView.encryptionSettings() != null) {
            for (InstanceViewStatus status : diskInstanceView.statuses()) {
                EncryptionStatus encryptionStatus = encryptionStatusFromCode(status.code());
                if (encryptionStatus != null) {
                    return encryptionStatus;
                }
            }
            break;
        }
    }
    return EncryptionStatus.UNKNOWN;
}
@Override
public EncryptionStatus dataDiskStatus() {
    if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
        return EncryptionStatus.UNKNOWN;
    }
    // Collect the distinct encryption states of the data disks — i.e. disks WITHOUT
    // encryptionSettings (the one carrying settings is treated as the OS disk, see osDiskStatus).
    HashSet<EncryptionStatus> encryptStatuses = new HashSet<>();
    for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
        if (diskInstanceView.encryptionSettings() != null) {
            continue;
        }
        for (InstanceViewStatus status : diskInstanceView.statuses()) {
            EncryptionStatus encryptionStatus = encryptionStatusFromCode(status.code());
            if (encryptionStatus != null) {
                encryptStatuses.add(encryptionStatus);
                break;  // only the first encryption status per disk counts
            }
        }
    }
    // A single distinct state is reported as-is.
    if (encryptStatuses.isEmpty()) {
        return EncryptionStatus.UNKNOWN;
    } else if (encryptStatuses.size() == 1) {
        return encryptStatuses.iterator().next();
    }
    // Mixed states are folded into one by precedence, most pessimistic first:
    // UNKNOWN > NOT_MOUNTED > ENCRYPTION_INPROGRESS > VM_RESTART_PENDING.
    if (encryptStatuses.contains(EncryptionStatus.UNKNOWN)) {
        return EncryptionStatus.UNKNOWN;
    } else if (encryptStatuses.contains(EncryptionStatus.NOT_MOUNTED)) {
        return EncryptionStatus.NOT_MOUNTED;
    } else if (encryptStatuses.contains(EncryptionStatus.ENCRYPTION_INPROGRESS)) {
        return EncryptionStatus.ENCRYPTION_INPROGRESS;
    } else if (encryptStatuses.contains(EncryptionStatus.VM_RESTART_PENDING)) {
        return EncryptionStatus.VM_RESTART_PENDING;
    } else {
        return EncryptionStatus.UNKNOWN;
    }
}
@Override
public Map<String, InstanceViewStatus> diskInstanceViewEncryptionStatuses() {
    // No cached instance view -> empty map rather than null.
    if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
        return new HashMap<>();
    }
    // Map each disk name to its first encryption-state status, when it has one.
    HashMap<String, InstanceViewStatus> div = new HashMap<String, InstanceViewStatus>();
    for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
        for (InstanceViewStatus status : diskInstanceView.statuses()) {
            if (encryptionStatusFromCode(status.code()) != null) {
                div.put(diskInstanceView.name(), status);
                break;
            }
        }
    }
    return div;
}
@Override
public DiskVolumeEncryptionMonitor refresh() {
    // Blocking wrapper over refreshAsync().
    return refreshAsync().block();
}

@Override
public Mono<DiskVolumeEncryptionMonitor> refreshAsync() {
    final WindowsVolumeNoAADEncryptionMonitorImpl self = this;
    // Re-fetch the VM and cache it on this instance; the status accessors read that snapshot.
    // Errors from the GET (e.g. VM not found) propagate to the subscriber unchanged.
    return retrieveVirtualMachineAsync()
        .map(
            virtualMachine -> {
                self.virtualMachine = virtualMachine;
                return self;
            });
}
/**
* Retrieve the virtual machine. If the virtual machine does not exists then an error observable will be returned.
*
* @return the retrieved virtual machine
*/
/**
* Given disk instance view status code, check whether it is encryption status code if yes map it to
* EncryptionStatus.
*
* @param code the encryption status code
* @return mapped EncryptionStatus if given code is encryption status code, null otherwise.
*/
private static EncryptionStatus encryptionStatusFromCode(String code) {
    // Encryption statuses are reported as "EncryptionState/<state>" (case-insensitive prefix);
    // any other code means this status is not about encryption at all.
    if (code == null || !code.toLowerCase(Locale.ROOT).startsWith("encryptionstate")) {
        return null;
    }
    final int separator = code.indexOf('/');
    if (separator < 0) {
        // Prefix matched but no "/<state>" component follows.
        return EncryptionStatus.UNKNOWN;
    }
    return EncryptionStatus.fromString(code.substring(separator + 1));
}
} | class WindowsVolumeNoAADEncryptionMonitorImpl implements DiskVolumeEncryptionMonitor {
private final String rgName;
private final String vmName;
private final ComputeManager computeManager;
private VirtualMachineInner virtualMachine;
WindowsVolumeNoAADEncryptionMonitorImpl(String virtualMachineId, ComputeManager computeManager) {
this.rgName = ResourceUtils.groupFromResourceId(virtualMachineId);
this.vmName = ResourceUtils.nameFromResourceId(virtualMachineId);
this.computeManager = computeManager;
}
@Override
public OperatingSystemTypes osType() {
return OperatingSystemTypes.WINDOWS;
}
@Override
public String progressMessage() {
return String.format("OSDisk: %s DataDisk: %s", osDiskStatus(), dataDiskStatus());
}
@Override
public EncryptionStatus osDiskStatus() {
if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
return EncryptionStatus.UNKNOWN;
}
for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
if (diskInstanceView.encryptionSettings() != null) {
for (InstanceViewStatus status : diskInstanceView.statuses()) {
EncryptionStatus encryptionStatus = encryptionStatusFromCode(status.code());
if (encryptionStatus != null) {
return encryptionStatus;
}
}
break;
}
}
return EncryptionStatus.UNKNOWN;
}
@Override
public EncryptionStatus dataDiskStatus() {
if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
return EncryptionStatus.UNKNOWN;
}
HashSet<EncryptionStatus> encryptStatuses = new HashSet<>();
for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
if (diskInstanceView.encryptionSettings() != null) {
continue;
}
for (InstanceViewStatus status : diskInstanceView.statuses()) {
EncryptionStatus encryptionStatus = encryptionStatusFromCode(status.code());
if (encryptionStatus != null) {
encryptStatuses.add(encryptionStatus);
break;
}
}
}
if (encryptStatuses.isEmpty()) {
return EncryptionStatus.UNKNOWN;
} else if (encryptStatuses.size() == 1) {
return encryptStatuses.iterator().next();
}
if (encryptStatuses.contains(EncryptionStatus.UNKNOWN)) {
return EncryptionStatus.UNKNOWN;
} else if (encryptStatuses.contains(EncryptionStatus.NOT_MOUNTED)) {
return EncryptionStatus.NOT_MOUNTED;
} else if (encryptStatuses.contains(EncryptionStatus.ENCRYPTION_INPROGRESS)) {
return EncryptionStatus.ENCRYPTION_INPROGRESS;
} else if (encryptStatuses.contains(EncryptionStatus.VM_RESTART_PENDING)) {
return EncryptionStatus.VM_RESTART_PENDING;
} else {
return EncryptionStatus.UNKNOWN;
}
}
@Override
public Map<String, InstanceViewStatus> diskInstanceViewEncryptionStatuses() {
if (virtualMachine.instanceView() == null || virtualMachine.instanceView().disks() == null) {
return new HashMap<>();
}
HashMap<String, InstanceViewStatus> div = new HashMap<String, InstanceViewStatus>();
for (DiskInstanceView diskInstanceView : virtualMachine.instanceView().disks()) {
for (InstanceViewStatus status : diskInstanceView.statuses()) {
if (encryptionStatusFromCode(status.code()) != null) {
div.put(diskInstanceView.name(), status);
break;
}
}
}
return div;
}
@Override
public DiskVolumeEncryptionMonitor refresh() {
return refreshAsync().block();
}
@Override
public Mono<DiskVolumeEncryptionMonitor> refreshAsync() {
final WindowsVolumeNoAADEncryptionMonitorImpl self = this;
return retrieveVirtualMachineAsync()
.map(
virtualMachine -> {
self.virtualMachine = virtualMachine;
return self;
});
}
/**
* Retrieve the virtual machine. If the virtual machine does not exists then an error observable will be returned.
*
* @return the retrieved virtual machine
*/
/**
* Given disk instance view status code, check whether it is encryption status code if yes map it to
* EncryptionStatus.
*
* @param code the encryption status code
* @return mapped EncryptionStatus if given code is encryption status code, null otherwise.
*/
private static EncryptionStatus encryptionStatusFromCode(String code) {
if (code != null && code.toLowerCase(Locale.ROOT).startsWith("encryptionstate")) {
String[] parts = code.split("/", 2);
if (parts.length != 2) {
return EncryptionStatus.UNKNOWN;
} else {
return EncryptionStatus.fromString(parts[1]);
}
}
return null;
}
} |
I think it could just be removed, since the other list calls will not need to deal with 404. | public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers.listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.extensionTypes()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineExtensionImageType ->
virtualMachineExtensionImageType.versions().listAsync())
.flatMap(VirtualMachineExtensionImageVersion::getImageAsync));
} | e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e)) | public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers.listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.extensionTypes()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineExtensionImageType ->
virtualMachineExtensionImageType.versions().listAsync())
.flatMap(VirtualMachineExtensionImageVersion::getImageAsync));
} | class VirtualMachineExtensionImagesImpl implements VirtualMachineExtensionImages {
private final VirtualMachinePublishers publishers;
public VirtualMachineExtensionImagesImpl(VirtualMachinePublishers publishers) {
this.publishers = publishers;
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(Region region) {
return listByRegion(region.toString());
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} | class VirtualMachineExtensionImagesImpl implements VirtualMachineExtensionImages {
private final VirtualMachinePublishers publishers;
public VirtualMachineExtensionImagesImpl(VirtualMachinePublishers publishers) {
this.publishers = publishers;
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(Region region) {
return listByRegion(region.toString());
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} |
same as above | public PagedFlux<VirtualMachineImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers().listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.offers()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineOffer -> virtualMachineOffer.skus().listAsync())
.flatMap(virtualMachineSku -> virtualMachineSku.images().listAsync()));
} | e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e)) | public PagedFlux<VirtualMachineImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers().listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.offers()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineOffer -> virtualMachineOffer.skus().listAsync())
.flatMap(virtualMachineSku -> virtualMachineSku.images().listAsync()));
} | class VirtualMachineImagesImpl implements VirtualMachineImages {
private final VirtualMachinePublishers publishers;
private final VirtualMachineImagesClient client;
public VirtualMachineImagesImpl(VirtualMachinePublishers publishers, VirtualMachineImagesClient client) {
this.publishers = publishers;
this.client = client;
}
@Override
public VirtualMachineImage getImage(
    Region region, String publisherName, String offerName, String skuName, String version) {
    // "latest" is resolved client-side: list a single image ordered by "name desc" and take its
    // name as the concrete version. NOTE(review): this assumes the service's descending name
    // ordering yields the newest version first — confirm.
    if (version.equalsIgnoreCase("latest")) {
        List<VirtualMachineImageResourceInner> innerImages =
            this.client.list(region.name(), publisherName, offerName, skuName, null, 1, "name desc");
        if (innerImages != null && !innerImages.isEmpty()) {
            VirtualMachineImageResourceInner innerImageResource = innerImages.get(0);
            version = innerImageResource.name();
        }
    }
    VirtualMachineImageInner innerImage =
        this.client.get(region.name(), publisherName, offerName, skuName, version);
    // Returns null when the service has no matching image.
    return (innerImage != null)
        ? new VirtualMachineImageImpl(region, publisherName, offerName, skuName, version, innerImage)
        : null;
}
@Override
public VirtualMachineImage getImage(
String region, String publisherName, String offerName, String skuName, String version) {
if (version.equalsIgnoreCase("latest")) {
List<VirtualMachineImageResourceInner> innerImages =
this.client.list(region, publisherName, offerName, skuName, null, 1, "name desc");
if (innerImages != null && !innerImages.isEmpty()) {
VirtualMachineImageResourceInner innerImageResource = innerImages.get(0);
version = innerImageResource.name();
}
}
VirtualMachineImageInner innerImage = this.client.get(region, publisherName, offerName, skuName, version);
return (innerImage != null)
? new VirtualMachineImageImpl(
Region.fromName(region), publisherName, offerName, skuName, version, innerImage)
: null;
}
@Override
public PagedIterable<VirtualMachineImage> listByRegion(Region location) {
return listByRegion(location.toString());
}
@Override
public PagedIterable<VirtualMachineImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} | class VirtualMachineImagesImpl implements VirtualMachineImages {
private final VirtualMachinePublishers publishers;
private final VirtualMachineImagesClient client;
public VirtualMachineImagesImpl(VirtualMachinePublishers publishers, VirtualMachineImagesClient client) {
this.publishers = publishers;
this.client = client;
}
@Override
public VirtualMachineImage getImage(
Region region, String publisherName, String offerName, String skuName, String version) {
if (version.equalsIgnoreCase("latest")) {
List<VirtualMachineImageResourceInner> innerImages =
this.client.list(region.name(), publisherName, offerName, skuName, null, 1, "name desc");
if (innerImages != null && !innerImages.isEmpty()) {
VirtualMachineImageResourceInner innerImageResource = innerImages.get(0);
version = innerImageResource.name();
}
}
VirtualMachineImageInner innerImage =
this.client.get(region.name(), publisherName, offerName, skuName, version);
return (innerImage != null)
? new VirtualMachineImageImpl(region, publisherName, offerName, skuName, version, innerImage)
: null;
}
@Override
public VirtualMachineImage getImage(
String region, String publisherName, String offerName, String skuName, String version) {
if (version.equalsIgnoreCase("latest")) {
List<VirtualMachineImageResourceInner> innerImages =
this.client.list(region, publisherName, offerName, skuName, null, 1, "name desc");
if (innerImages != null && !innerImages.isEmpty()) {
VirtualMachineImageResourceInner innerImageResource = innerImages.get(0);
version = innerImageResource.name();
}
}
VirtualMachineImageInner innerImage = this.client.get(region, publisherName, offerName, skuName, version);
return (innerImage != null)
? new VirtualMachineImageImpl(
Region.fromName(region), publisherName, offerName, skuName, version, innerImage)
: null;
}
@Override
public PagedIterable<VirtualMachineImage> listByRegion(Region location) {
return listByRegion(location.toString());
}
@Override
public PagedIterable<VirtualMachineImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} |
This is because for 1 or 2 publisher, above list image type would fail with 404 (error on publisher not valid). But apparently we would still want to continue with the other publishers. | public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers.listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.extensionTypes()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineExtensionImageType ->
virtualMachineExtensionImageType.versions().listAsync())
.flatMap(VirtualMachineExtensionImageVersion::getImageAsync));
} | e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e)) | public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(String regionName) {
return PagedConverter
.flatMapPage(
publishers.listByRegionAsync(regionName),
virtualMachinePublisher ->
virtualMachinePublisher
.extensionTypes()
.listAsync()
.onErrorResume(ManagementException.class,
e -> e.getResponse().getStatusCode() == 404 ? Flux.empty() : Flux.error(e))
.flatMap(virtualMachineExtensionImageType ->
virtualMachineExtensionImageType.versions().listAsync())
.flatMap(VirtualMachineExtensionImageVersion::getImageAsync));
} | class VirtualMachineExtensionImagesImpl implements VirtualMachineExtensionImages {
private final VirtualMachinePublishers publishers;
public VirtualMachineExtensionImagesImpl(VirtualMachinePublishers publishers) {
this.publishers = publishers;
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(Region region) {
return listByRegion(region.toString());
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} | class VirtualMachineExtensionImagesImpl implements VirtualMachineExtensionImages {
private final VirtualMachinePublishers publishers;
public VirtualMachineExtensionImagesImpl(VirtualMachinePublishers publishers) {
this.publishers = publishers;
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(Region region) {
return listByRegion(region.toString());
}
@Override
public PagedIterable<VirtualMachineExtensionImage> listByRegion(String regionName) {
return new PagedIterable<>(listByRegionAsync(regionName));
}
@Override
public PagedFlux<VirtualMachineExtensionImage> listByRegionAsync(Region region) {
return listByRegionAsync(region.name());
}
@Override
@Override
public VirtualMachinePublishers publishers() {
return this.publishers;
}
} |
Why `AtomicReference` over `AtomicInteger`? | public void verifyExceptionPropagationFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectErrorMessage("Polling operation failed!")
.verify();
} | final AtomicReference<Integer> cnt = new AtomicReference<>(0); | public void verifyExceptionPropagationFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectErrorMessage("Polling operation failed!")
.verify();
} | class PollerTests {
@Mock
private Function<PollingContext<Response>, Mono<Response>> activationOperation;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> activationOperationWithResponse;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> pollOperation;
@Mock
private Function<PollingContext<Response>, Mono<CertificateOutput>> fetchResultOperation;
@Mock
private BiFunction<PollingContext<Response>, PollResponse<Response>, Mono<Response>> cancelOperation;
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Test
public void asyncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ZERO,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ofSeconds(-1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
null,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
null,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
null));
}
@Test
public void subscribeToSpecificOtherOperationStatusTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_2", false),
new Response("3"), retryAfter);
PollResponse<Response> response4 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("4"), retryAfter);
when(activationOperation.apply(any())).thenReturn(Mono.empty());
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3),
Mono.just(response4));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response4.getStatus())
.verifyComplete();
}
@Test
public void noPollingForSynchronouslyCompletedActivationTest() {
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.error(new RuntimeException("Polling shouldn't happen for synchronously completed activation.")));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus()
== LongRunningOperationStatus.SUCCESSFULLY_COMPLETED)
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void ensurePollingForInProgressActivationResponseTest() {
final Duration retryAfter = Duration.ofMillis(100);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(IN_PROGRESS,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("3"), retryAfter);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void subscribeToActivationOnlyOnceTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new Response("ActivationDone"));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void cancellationCanBeCalledFromOperatorChainTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final List<Object> cancelParameters = new ArrayList<>();
when(cancelOperation.apply(any(), any())).thenAnswer((Answer) invocation -> {
for (Object argument : invocation.getArguments()) {
cancelParameters.add(argument);
}
return Mono.just(new Response("OperationCancelled"));
});
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
@SuppressWarnings({"rawtypes"})
final AsyncPollResponse<Response, CertificateOutput>[] secondAsyncResponse = new AsyncPollResponse[1];
secondAsyncResponse[0] = null;
Response cancelResponse = pollerFlux
.take(2)
.last()
.flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<Response>>) asyncPollResponse -> {
secondAsyncResponse[0] = asyncPollResponse;
return asyncPollResponse.cancelOperation();
}).block();
Assertions.assertNotNull(cancelResponse);
Assertions.assertTrue(cancelResponse.getResponse().equalsIgnoreCase("OperationCancelled"));
Assertions.assertNotNull(secondAsyncResponse[0]);
Assertions.assertTrue(secondAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("1"));
Assertions.assertEquals(2, cancelParameters.size());
cancelParameters.get(0).equals(activationResponse);
cancelParameters.get(1).equals(response1);
}
@Test
public void getResultCanBeCalledFromOperatorChainTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final List<Object> fetchResultParameters = new ArrayList<>();
when(fetchResultOperation.apply(any())).thenAnswer((Answer) invocation -> {
for (Object argument : invocation.getArguments()) {
fetchResultParameters.add(argument);
}
return Mono.just(new CertificateOutput("LROFinalResult"));
});
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
@SuppressWarnings({"rawtypes"})
final AsyncPollResponse<Response, CertificateOutput>[] terminalAsyncResponse = new AsyncPollResponse[1];
terminalAsyncResponse[0] = null;
CertificateOutput lroResult = pollerFlux
.takeUntil(apr -> apr.getStatus().isComplete())
.last()
.flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<CertificateOutput>>)
asyncPollResponse -> {
terminalAsyncResponse[0] = asyncPollResponse;
return asyncPollResponse.getFinalResult();
}).block();
Assertions.assertNotNull(lroResult);
Assertions.assertTrue(lroResult.getName().equalsIgnoreCase("LROFinalResult"));
Assertions.assertNotNull(terminalAsyncResponse[0]);
Assertions.assertTrue(terminalAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("2"));
Assertions.assertEquals(1, fetchResultParameters.size());
Assertions.assertTrue(fetchResultParameters.get(0) instanceof PollingContext);
PollingContext<Response> pollingContext = (PollingContext<Response>) fetchResultParameters.get(0);
pollingContext.getActivationResponse().equals(activationResponse);
pollingContext.getLatestResponse().equals(response2);
}
@Test
@Test
public void verifyErrorFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
return Mono.just(new PollResponse<Response>(FAILED, new Response("2")));
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("3")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("4")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == FAILED)
.verifyComplete();
}
@Test
public void syncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ZERO,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(-1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
null,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
null,
fetchResultOperation));
}
@Test
public void syncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
null));
}
@Test
public void syncPollerShouldCallActivationFromConstructor() {
Boolean[] activationCalled = new Boolean[1];
activationCalled[0] = false;
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
activationCalled[0] = true;
return Mono.just(new Response("ActivationDone"));
}));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
Assertions.assertTrue(activationCalled[0]);
}
@Test
public void eachPollShouldReceiveLastPollResponse() {
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(new Response("A"))));
when(pollOperation.apply(any())).thenAnswer((Answer) invocation -> {
Assertions.assertEquals(1, invocation.getArguments().length);
Assertions.assertTrue(invocation.getArguments()[0] instanceof PollingContext);
PollingContext<Response> pollingContext = (PollingContext<Response>) invocation.getArguments()[0];
Assertions.assertTrue(pollingContext.getActivationResponse() instanceof PollResponse);
Assertions.assertTrue(pollingContext.getLatestResponse() instanceof PollResponse);
PollResponse<Response> latestResponse = pollingContext.getLatestResponse();
Assertions.assertNotNull(latestResponse);
PollResponse<Response> nextResponse = new PollResponse<>(IN_PROGRESS,
new Response(latestResponse.getValue().toString() + "A"), Duration.ofMillis(100));
return Mono.just(nextResponse);
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: AA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: AAA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: Response: AAAA"));
}
@Test
public void waitForCompletionShouldReturnTerminalPollResponse() {
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100));
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100));
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100));
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitForCompletion();
Assertions.assertNotNull(pollResponse.getValue());
Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
}
@Test
public void getResultShouldPollUntilCompletionAndFetchResult() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after terminal response");
}
});
when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
return Mono.just(new CertificateOutput("cert1"));
}));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
CertificateOutput certificateOutput = poller.getFinalResult();
Assertions.assertNotNull(certificateOutput);
Assertions.assertEquals("cert1", certificateOutput.getName());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void getResultShouldNotPollOnCompletedPoller() {
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100));
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100));
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100));
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
return Mono.just(new CertificateOutput("cert1"));
}));
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitForCompletion();
Assertions.assertNotNull(pollResponse.getValue());
Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
Assertions.assertTrue(true, "A Poll after completion should be called");
return Mono.empty();
});
CertificateOutput certificateOutput = poller.getFinalResult();
Assertions.assertNotNull(certificateOutput);
Assertions.assertEquals("cert1", certificateOutput.getName());
}
@Test
public void waitUntilShouldPollAfterMatchingStatus() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
LongRunningOperationStatus matchStatus
= LongRunningOperationStatus.fromString("OTHER_1", false);
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(matchStatus,
new Response("1"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after matching response");
}
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitUntil(matchStatus);
Assertions.assertEquals(matchStatus, pollResponse.getStatus());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void verifyExceptionPropagationFromPollingOperationSyncPoller() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
RuntimeException exception = assertThrows(RuntimeException.class,
() -> poller.getFinalResult());
Assertions.assertEquals(exception.getMessage(), "Polling operation failed!");
}
@Test
public void testPollerFluxError() throws InterruptedException {
IllegalArgumentException expectedException = new IllegalArgumentException();
PollerFlux<String, String> pollerFlux = error(expectedException);
CountDownLatch countDownLatch = new CountDownLatch(1);
pollerFlux.subscribe(
response -> Assertions.fail("Did not expect a response"),
ex -> {
countDownLatch.countDown();
Assertions.assertSame(expectedException, ex);
},
() -> Assertions.fail("Did not expect the flux to complete")
);
boolean completed = countDownLatch.await(1, TimeUnit.SECONDS);
Assertions.assertTrue(completed);
}
@Test
public void testSyncPollerError() {
PollerFlux<String, String> pollerFlux = error(new IllegalArgumentException());
Assertions.assertThrows(IllegalArgumentException.class, () -> pollerFlux.getSyncPoller());
}
public static class Response {
private final String response;
public Response(String response) {
this.response = response;
}
public String getResponse() {
return response;
}
@Override
public String toString() {
return "Response: " + response;
}
}
public class CertificateOutput {
String name;
public CertificateOutput(String certName) {
name = certName;
}
public String getName() {
return name;
}
}
} | class PollerTests {
@Mock
private Function<PollingContext<Response>, Mono<Response>> activationOperation;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> activationOperationWithResponse;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> pollOperation;
@Mock
private Function<PollingContext<Response>, Mono<CertificateOutput>> fetchResultOperation;
@Mock
private BiFunction<PollingContext<Response>, PollResponse<Response>, Mono<Response>> cancelOperation;
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Test
public void asyncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ZERO,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ofSeconds(-1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
null,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
null,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
null));
}
@Test
public void subscribeToSpecificOtherOperationStatusTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_2", false),
new Response("3"), retryAfter);
PollResponse<Response> response4 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("4"), retryAfter);
when(activationOperation.apply(any())).thenReturn(Mono.empty());
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3),
Mono.just(response4));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response4.getStatus())
.verifyComplete();
}
@Test
public void noPollingForSynchronouslyCompletedActivationTest() {
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.error(new RuntimeException("Polling shouldn't happen for synchronously completed activation.")));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus()
== LongRunningOperationStatus.SUCCESSFULLY_COMPLETED)
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void ensurePollingForInProgressActivationResponseTest() {
final Duration retryAfter = Duration.ofMillis(100);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(IN_PROGRESS,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("3"), retryAfter);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void subscribeToActivationOnlyOnceTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new Response("ActivationDone"));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void cancellationCanBeCalledFromOperatorChainTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final List<Object> cancelParameters = new ArrayList<>();
when(cancelOperation.apply(any(), any())).thenAnswer((Answer) invocation -> {
for (Object argument : invocation.getArguments()) {
cancelParameters.add(argument);
}
return Mono.just(new Response("OperationCancelled"));
});
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
@SuppressWarnings({"rawtypes"})
final AsyncPollResponse<Response, CertificateOutput>[] secondAsyncResponse = new AsyncPollResponse[1];
secondAsyncResponse[0] = null;
Response cancelResponse = pollerFlux
.take(2)
.last()
.flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<Response>>) asyncPollResponse -> {
secondAsyncResponse[0] = asyncPollResponse;
return asyncPollResponse.cancelOperation();
}).block();
Assertions.assertNotNull(cancelResponse);
Assertions.assertTrue(cancelResponse.getResponse().equalsIgnoreCase("OperationCancelled"));
Assertions.assertNotNull(secondAsyncResponse[0]);
Assertions.assertTrue(secondAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("1"));
Assertions.assertEquals(2, cancelParameters.size());
cancelParameters.get(0).equals(activationResponse);
cancelParameters.get(1).equals(response1);
}
@Test
public void getResultCanBeCalledFromOperatorChainTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final List<Object> fetchResultParameters = new ArrayList<>();
when(fetchResultOperation.apply(any())).thenAnswer((Answer) invocation -> {
for (Object argument : invocation.getArguments()) {
fetchResultParameters.add(argument);
}
return Mono.just(new CertificateOutput("LROFinalResult"));
});
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
@SuppressWarnings({"rawtypes"})
final AsyncPollResponse<Response, CertificateOutput>[] terminalAsyncResponse = new AsyncPollResponse[1];
terminalAsyncResponse[0] = null;
CertificateOutput lroResult = pollerFlux
.takeUntil(apr -> apr.getStatus().isComplete())
.last()
.flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<CertificateOutput>>)
asyncPollResponse -> {
terminalAsyncResponse[0] = asyncPollResponse;
return asyncPollResponse.getFinalResult();
}).block();
Assertions.assertNotNull(lroResult);
Assertions.assertTrue(lroResult.getName().equalsIgnoreCase("LROFinalResult"));
Assertions.assertNotNull(terminalAsyncResponse[0]);
Assertions.assertTrue(terminalAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("2"));
Assertions.assertEquals(1, fetchResultParameters.size());
Assertions.assertTrue(fetchResultParameters.get(0) instanceof PollingContext);
PollingContext<Response> pollingContext = (PollingContext<Response>) fetchResultParameters.get(0);
pollingContext.getActivationResponse().equals(activationResponse);
pollingContext.getLatestResponse().equals(response2);
}
@Test
@Test
public void verifyErrorFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
return Mono.just(new PollResponse<Response>(FAILED, new Response("2")));
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("3")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("4")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == FAILED)
.verifyComplete();
}
@Test
public void syncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ZERO,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(-1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
null,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
null,
fetchResultOperation));
}
@Test
public void syncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
null));
}
@Test
public void syncPollerShouldCallActivationFromConstructor() {
Boolean[] activationCalled = new Boolean[1];
activationCalled[0] = false;
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
activationCalled[0] = true;
return Mono.just(new Response("ActivationDone"));
}));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
Assertions.assertTrue(activationCalled[0]);
}
@Test
public void eachPollShouldReceiveLastPollResponse() {
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(new Response("A"))));
when(pollOperation.apply(any())).thenAnswer((Answer) invocation -> {
Assertions.assertEquals(1, invocation.getArguments().length);
Assertions.assertTrue(invocation.getArguments()[0] instanceof PollingContext);
PollingContext<Response> pollingContext = (PollingContext<Response>) invocation.getArguments()[0];
Assertions.assertTrue(pollingContext.getActivationResponse() instanceof PollResponse);
Assertions.assertTrue(pollingContext.getLatestResponse() instanceof PollResponse);
PollResponse<Response> latestResponse = pollingContext.getLatestResponse();
Assertions.assertNotNull(latestResponse);
PollResponse<Response> nextResponse = new PollResponse<>(IN_PROGRESS,
new Response(latestResponse.getValue().toString() + "A"), Duration.ofMillis(100));
return Mono.just(nextResponse);
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: AA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: AAA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: Response: AAAA"));
}
@Test
public void waitForCompletionShouldReturnTerminalPollResponse() {
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100));
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100));
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100));
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitForCompletion();
Assertions.assertNotNull(pollResponse.getValue());
Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
}
@Test
public void getResultShouldPollUntilCompletionAndFetchResult() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after terminal response");
}
});
when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
return Mono.just(new CertificateOutput("cert1"));
}));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
CertificateOutput certificateOutput = poller.getFinalResult();
Assertions.assertNotNull(certificateOutput);
Assertions.assertEquals("cert1", certificateOutput.getName());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void getResultShouldNotPollOnCompletedPoller() {
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100));
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100));
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100));
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
return Mono.just(new CertificateOutput("cert1"));
}));
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitForCompletion();
Assertions.assertNotNull(pollResponse.getValue());
Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
Assertions.assertTrue(true, "A Poll after completion should be called");
return Mono.empty();
});
CertificateOutput certificateOutput = poller.getFinalResult();
Assertions.assertNotNull(certificateOutput);
Assertions.assertEquals("cert1", certificateOutput.getName());
}
@Test
public void waitUntilShouldPollAfterMatchingStatus() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
LongRunningOperationStatus matchStatus
= LongRunningOperationStatus.fromString("OTHER_1", false);
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(matchStatus,
new Response("1"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after matching response");
}
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitUntil(matchStatus);
Assertions.assertEquals(matchStatus, pollResponse.getStatus());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void verifyExceptionPropagationFromPollingOperationSyncPoller() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
RuntimeException exception = assertThrows(RuntimeException.class,
() -> poller.getFinalResult());
Assertions.assertEquals(exception.getMessage(), "Polling operation failed!");
}
@Test
public void testPollerFluxError() throws InterruptedException {
IllegalArgumentException expectedException = new IllegalArgumentException();
PollerFlux<String, String> pollerFlux = error(expectedException);
CountDownLatch countDownLatch = new CountDownLatch(1);
pollerFlux.subscribe(
response -> Assertions.fail("Did not expect a response"),
ex -> {
countDownLatch.countDown();
Assertions.assertSame(expectedException, ex);
},
() -> Assertions.fail("Did not expect the flux to complete")
);
boolean completed = countDownLatch.await(1, TimeUnit.SECONDS);
Assertions.assertTrue(completed);
}
@Test
public void testSyncPollerError() {
PollerFlux<String, String> pollerFlux = error(new IllegalArgumentException());
Assertions.assertThrows(IllegalArgumentException.class, () -> pollerFlux.getSyncPoller());
}
public static class Response {
private final String response;
public Response(String response) {
this.response = response;
}
public String getResponse() {
return response;
}
@Override
public String toString() {
return "Response: " + response;
}
}
public class CertificateOutput {
String name;
public CertificateOutput(String certName) {
name = certName;
}
public String getName() {
return name;
}
}
} |
We could use either.. | public void verifyExceptionPropagationFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectErrorMessage("Polling operation failed!")
.verify();
} | final AtomicReference<Integer> cnt = new AtomicReference<>(0); | public void verifyExceptionPropagationFromPollingOperation() {
final Response activationResponse = new Response("Foo");
when(activationOperation.apply(any()))
.thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
final AtomicReference<Integer> cnt = new AtomicReference<>(0);
pollOperation = (pollingContext) -> {
cnt.getAndSet(cnt.get() + 1);
if (cnt.get() <= 2) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
} else if (cnt.get() == 3) {
throw new RuntimeException("Polling operation failed!");
} else if (cnt.get() == 4) {
return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
} else {
return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
}
};
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
.expectErrorMessage("Polling operation failed!")
.verify();
} | class PollerTests {
@Mock
private Function<PollingContext<Response>, Mono<Response>> activationOperation;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> activationOperationWithResponse;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> pollOperation;
@Mock
private Function<PollingContext<Response>, Mono<CertificateOutput>> fetchResultOperation;
@Mock
private BiFunction<PollingContext<Response>, PollResponse<Response>, Mono<Response>> cancelOperation;
@BeforeEach
public void beforeTest() {
    // Re-initialize the @Mock fields declared above so each test gets fresh mocks.
    MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
    // Clear inline mocks so stubbing/state does not leak between tests.
    Mockito.framework().clearInlineMocks();
}
@Test
public void asyncPollerConstructorPollIntervalZero() {
    // A zero poll interval is invalid; the constructor must reject it.
    assertThrows(IllegalArgumentException.class,
        () -> new PollerFlux<>(Duration.ZERO, activationOperation, pollOperation,
            cancelOperation, fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ofSeconds(-1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
null,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
null,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
null));
}
@Test
public void subscribeToSpecificOtherOperationStatusTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_2", false),
new Response("3"), retryAfter);
PollResponse<Response> response4 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("4"), retryAfter);
when(activationOperation.apply(any())).thenReturn(Mono.empty());
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3),
Mono.just(response4));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response4.getStatus())
.verifyComplete();
}
@Test
public void noPollingForSynchronouslyCompletedActivationTest() {
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.error(new RuntimeException("Polling shouldn't happen for synchronously completed activation.")));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus()
== LongRunningOperationStatus.SUCCESSFULLY_COMPLETED)
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void ensurePollingForInProgressActivationResponseTest() {
final Duration retryAfter = Duration.ofMillis(100);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(IN_PROGRESS,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("3"), retryAfter);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void subscribeToActivationOnlyOnceTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), retryAfter);
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new Response("ActivationDone"));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void cancellationCanBeCalledFromOperatorChainTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), retryAfter);
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Capture the arguments the poller passes to the cancel operation.
    final List<Object> cancelParameters = new ArrayList<>();
    when(cancelOperation.apply(any(), any())).thenAnswer((Answer) invocation -> {
        for (Object argument : invocation.getArguments()) {
            cancelParameters.add(argument);
        }
        return Mono.just(new Response("OperationCancelled"));
    });
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    @SuppressWarnings({"rawtypes"})
    final AsyncPollResponse<Response, CertificateOutput>[] secondAsyncResponse = new AsyncPollResponse[1];
    secondAsyncResponse[0] = null;
    // Cancel from within the operator chain after the second emitted poll response.
    Response cancelResponse = pollerFlux
        .take(2)
        .last()
        .flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<Response>>) asyncPollResponse -> {
            secondAsyncResponse[0] = asyncPollResponse;
            return asyncPollResponse.cancelOperation();
        }).block();
    Assertions.assertNotNull(cancelResponse);
    Assertions.assertTrue(cancelResponse.getResponse().equalsIgnoreCase("OperationCancelled"));
    Assertions.assertNotNull(secondAsyncResponse[0]);
    Assertions.assertTrue(secondAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("1"));
    // BUGFIX: the original called equals(...) and discarded the boolean result,
    // so the two parameter checks never asserted anything. Assert what the
    // cancel BiFunction's signature guarantees about the captured arguments.
    Assertions.assertEquals(2, cancelParameters.size());
    Assertions.assertTrue(cancelParameters.get(0) instanceof PollingContext,
        "first cancel argument should be the polling context");
    Assertions.assertTrue(cancelParameters.get(1) instanceof PollResponse,
        "second cancel argument should be a poll response");
}
@Test
@SuppressWarnings("unchecked")
public void getResultCanBeCalledFromOperatorChainTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), retryAfter);
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Capture the arguments the poller passes to the fetch-result operation.
    final List<Object> fetchResultParameters = new ArrayList<>();
    when(fetchResultOperation.apply(any())).thenAnswer((Answer) invocation -> {
        for (Object argument : invocation.getArguments()) {
            fetchResultParameters.add(argument);
        }
        return Mono.just(new CertificateOutput("LROFinalResult"));
    });
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    @SuppressWarnings({"rawtypes"})
    final AsyncPollResponse<Response, CertificateOutput>[] terminalAsyncResponse = new AsyncPollResponse[1];
    terminalAsyncResponse[0] = null;
    // Fetch the final result from within the operator chain once terminal.
    CertificateOutput lroResult = pollerFlux
        .takeUntil(apr -> apr.getStatus().isComplete())
        .last()
        .flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<CertificateOutput>>)
            asyncPollResponse -> {
                terminalAsyncResponse[0] = asyncPollResponse;
                return asyncPollResponse.getFinalResult();
            }).block();
    Assertions.assertNotNull(lroResult);
    Assertions.assertTrue(lroResult.getName().equalsIgnoreCase("LROFinalResult"));
    Assertions.assertNotNull(terminalAsyncResponse[0]);
    Assertions.assertTrue(terminalAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("2"));
    Assertions.assertEquals(1, fetchResultParameters.size());
    Assertions.assertTrue(fetchResultParameters.get(0) instanceof PollingContext);
    PollingContext<Response> pollingContext = (PollingContext<Response>) fetchResultParameters.get(0);
    // BUGFIX: the original invoked equals(...) without asserting the result,
    // so these checks were no-ops. Compare the underlying response payloads.
    Assertions.assertEquals(activationResponse.getResponse(),
        pollingContext.getActivationResponse().getValue().getResponse());
    Assertions.assertEquals(response2.getValue().getResponse(),
        pollingContext.getLatestResponse().getValue().getResponse());
}
@Test
public void verifyErrorFromPollingOperation() {
    // BUGFIX: the annotation was duplicated (`@Test @Test`), which does not
    // compile because JUnit's @Test is not @Repeatable.
    // A FAILED status is terminal: the flux should complete normally after
    // emitting it, not signal onError.
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    final AtomicReference<Integer> cnt = new AtomicReference<>(0);
    pollOperation = (pollingContext) -> {
        cnt.getAndSet(cnt.get() + 1);
        if (cnt.get() <= 2) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
        } else if (cnt.get() == 3) {
            return Mono.just(new PollResponse<Response>(FAILED, new Response("2")));
        } else if (cnt.get() == 4) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("3")));
        } else {
            return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("4")));
        }
    };
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    StepVerifier.create(pollerFlux)
        .expectSubscription()
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == FAILED)
        .verifyComplete();
}
@Test
public void syncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ZERO,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(-1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
null,
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void syncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
null,
fetchResultOperation));
}
@Test
public void syncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
null));
}
@Test
public void syncPollerShouldCallActivationFromConstructor() {
    // Track whether activation ran; a primitive boolean[] avoids the boxed
    // Boolean[] of the original while staying assignable from the lambda.
    boolean[] activationCalled = new boolean[1];
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
        activationCalled[0] = true;
        return Mono.just(new Response("ActivationDone"));
    }));
    // Constructing the sync poller is expected to eagerly run activation.
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    Assertions.assertNotNull(poller); // the local was previously unused
    Assertions.assertTrue(activationCalled[0]);
}
@Test
public void eachPollShouldReceiveLastPollResponse() {
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(new Response("A"))));
when(pollOperation.apply(any())).thenAnswer((Answer) invocation -> {
Assertions.assertEquals(1, invocation.getArguments().length);
Assertions.assertTrue(invocation.getArguments()[0] instanceof PollingContext);
PollingContext<Response> pollingContext = (PollingContext<Response>) invocation.getArguments()[0];
Assertions.assertTrue(pollingContext.getActivationResponse() instanceof PollResponse);
Assertions.assertTrue(pollingContext.getLatestResponse() instanceof PollResponse);
PollResponse<Response> latestResponse = pollingContext.getLatestResponse();
Assertions.assertNotNull(latestResponse);
PollResponse<Response> nextResponse = new PollResponse<>(IN_PROGRESS,
new Response(latestResponse.getValue().toString() + "A"), Duration.ofMillis(100));
return Mono.just(nextResponse);
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: AA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: AAA"));
pollResponse = poller.poll();
Assertions.assertNotNull(pollResponse);
Assertions.assertNotNull(pollResponse.getValue().getResponse());
Assertions.assertTrue(pollResponse.getValue()
.getResponse()
.equalsIgnoreCase("Response: Response: Response: AAAA"));
}
@Test
public void waitForCompletionShouldReturnTerminalPollResponse() {
    // Two in-progress responses followed by a terminal (successfully completed) one.
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> first = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> second = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> terminal = new PollResponse<>(
        LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, new Response("2"), retryAfter);
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    when(pollOperation.apply(any()))
        .thenReturn(Mono.just(first), Mono.just(second), Mono.just(terminal));
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // waitForCompletion must block through the in-progress responses and
    // return the terminal one.
    PollResponse<Response> result = poller.waitForCompletion();
    Assertions.assertNotNull(result.getValue());
    Assertions.assertEquals(terminal.getValue().getResponse(), result.getValue().getResponse());
    Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, result.getStatus());
}
@Test
public void getResultShouldPollUntilCompletionAndFetchResult() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("2"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after terminal response");
}
});
when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
return Mono.just(new CertificateOutput("cert1"));
}));
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
CertificateOutput certificateOutput = poller.getFinalResult();
Assertions.assertNotNull(certificateOutput);
Assertions.assertEquals("cert1", certificateOutput.getName());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void getResultShouldNotPollOnCompletedPoller() {
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), Duration.ofMillis(100));
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), Duration.ofMillis(100));
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), Duration.ofMillis(100));
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
        return Mono.just(new CertificateOutput("cert1"));
    }));
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    PollResponse<Response> pollResponse = poller.waitForCompletion();
    Assertions.assertNotNull(pollResponse.getValue());
    Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
    Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
    // BUGFIX: the original answer asserted `assertTrue(true, ...)`, which can
    // never fail, and its message contradicted the test's name. The intent is
    // that a completed poller must NOT poll again, so any further poll
    // invocation fails the test.
    when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock ->
        Assertions.fail("Poll should not be called after terminal response"));
    CertificateOutput certificateOutput = poller.getFinalResult();
    Assertions.assertNotNull(certificateOutput);
    Assertions.assertEquals("cert1", certificateOutput.getName());
}
@Test
public void waitUntilShouldPollAfterMatchingStatus() {
final Response activationResponse = new Response("Activated");
when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
LongRunningOperationStatus matchStatus
= LongRunningOperationStatus.fromString("OTHER_1", false);
int[] invocationCount = new int[1];
invocationCount[0] = -1;
when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
invocationCount[0]++;
switch (invocationCount[0]) {
case 0:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("0"), Duration.ofMillis(100)));
case 1:
return Mono.just(new PollResponse<>(IN_PROGRESS,
new Response("1"), Duration.ofMillis(100)));
case 2:
return Mono.just(new PollResponse<>(matchStatus,
new Response("1"), Duration.ofMillis(100)));
default:
throw new RuntimeException("Poll should not be called after matching response");
}
});
SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
Duration.ofSeconds(1),
cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
activationOperation.apply(cxt).block()),
pollOperation,
cancelOperation,
fetchResultOperation);
PollResponse<Response> pollResponse = poller.waitUntil(matchStatus);
Assertions.assertEquals(matchStatus, pollResponse.getStatus());
Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void verifyExceptionPropagationFromPollingOperationSyncPoller() {
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    final AtomicReference<Integer> cnt = new AtomicReference<>(0);
    // The third poll throws synchronously; getFinalResult() must surface it.
    pollOperation = (pollingContext) -> {
        cnt.getAndSet(cnt.get() + 1);
        if (cnt.get() <= 2) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
        } else if (cnt.get() == 3) {
            throw new RuntimeException("Polling operation failed!");
        } else if (cnt.get() == 4) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
        } else {
            return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
        }
    };
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    RuntimeException exception = assertThrows(RuntimeException.class,
        () -> poller.getFinalResult());
    // BUGFIX: JUnit's assertEquals takes the expected value first; the
    // original had the arguments swapped, producing misleading failure output.
    Assertions.assertEquals("Polling operation failed!", exception.getMessage());
}
@Test
public void testPollerFluxError() throws InterruptedException {
IllegalArgumentException expectedException = new IllegalArgumentException();
PollerFlux<String, String> pollerFlux = error(expectedException);
CountDownLatch countDownLatch = new CountDownLatch(1);
pollerFlux.subscribe(
response -> Assertions.fail("Did not expect a response"),
ex -> {
countDownLatch.countDown();
Assertions.assertSame(expectedException, ex);
},
() -> Assertions.fail("Did not expect the flux to complete")
);
boolean completed = countDownLatch.await(1, TimeUnit.SECONDS);
Assertions.assertTrue(completed);
}
@Test
public void testSyncPollerError() {
PollerFlux<String, String> pollerFlux = error(new IllegalArgumentException());
Assertions.assertThrows(IllegalArgumentException.class, () -> pollerFlux.getSyncPoller());
}
/** Simple payload type used as the poll-response value throughout these tests. */
public static class Response {
    // Wrapped payload string supplied at construction; never changes.
    private final String value;

    public Response(String response) {
        this.value = response;
    }

    /** @return the payload string this response wraps. */
    public String getResponse() {
        return value;
    }

    @Override
    public String toString() {
        return "Response: " + value;
    }
}
/**
 * Simple model used as the long-running operation's final result in these tests.
 */
// NOTE(review): unlike the static Response nested class above, this one is not
// static and so carries a hidden reference to the enclosing test instance;
// consider making it static for consistency — confirm no outer-instance use.
public class CertificateOutput {
    // Certificate name supplied at construction. The original field was
    // mutable and package-visible for no reason; made private final.
    private final String name;

    public CertificateOutput(String certName) {
        this.name = certName;
    }

    /** @return the certificate name supplied at construction. */
    public String getName() {
        return name;
    }
}
} | class PollerTests {
@Mock
private Function<PollingContext<Response>, Mono<Response>> activationOperation;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> activationOperationWithResponse;
@Mock
private Function<PollingContext<Response>, Mono<PollResponse<Response>>> pollOperation;
@Mock
private Function<PollingContext<Response>, Mono<CertificateOutput>> fetchResultOperation;
@Mock
private BiFunction<PollingContext<Response>, PollResponse<Response>, Mono<Response>> cancelOperation;
@BeforeEach
public void beforeTest() {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void afterTest() {
Mockito.framework().clearInlineMocks();
}
@Test
public void asyncPollerConstructorPollIntervalZero() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ZERO,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNegative() {
assertThrows(IllegalArgumentException.class, () -> new PollerFlux<>(
Duration.ofSeconds(-1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollIntervalNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
null,
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorActivationOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
null,
pollOperation,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorPollOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
null,
cancelOperation,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorCancelOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
null,
fetchResultOperation));
}
@Test
public void asyncPollerConstructorFetchResultOperationNull() {
assertThrows(NullPointerException.class, () -> new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
null));
}
@Test
public void subscribeToSpecificOtherOperationStatusTest() {
final Duration retryAfter = Duration.ofMillis(100);
PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
new Response("0"), retryAfter);
PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
new Response("1"), retryAfter);
PollResponse<Response> response2 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_1", false),
new Response("2"), retryAfter);
PollResponse<Response> response3 = new PollResponse<>(
LongRunningOperationStatus.fromString("OTHER_2", false),
new Response("3"), retryAfter);
PollResponse<Response> response4 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("4"), retryAfter);
when(activationOperation.apply(any())).thenReturn(Mono.empty());
when(pollOperation.apply(any())).thenReturn(
Mono.just(response0),
Mono.just(response1),
Mono.just(response2),
Mono.just(response3),
Mono.just(response4));
PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
Duration.ofSeconds(1),
activationOperation,
pollOperation,
cancelOperation,
fetchResultOperation);
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response4.getStatus())
.verifyComplete();
}
@Test
public void noPollingForSynchronouslyCompletedActivationTest() {
int[] activationCallCount = new int[1];
activationCallCount[0] = 0;
when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
activationCallCount[0]++;
return Mono.just(new PollResponse<Response>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
new Response("ActivationDone")));
}));
PollerFlux<Response, CertificateOutput> pollerFlux = create(
Duration.ofSeconds(1),
activationOperationWithResponse,
pollOperation,
cancelOperation,
fetchResultOperation);
when(pollOperation.apply(any())).thenReturn(
Mono.error(new RuntimeException("Polling shouldn't happen for synchronously completed activation.")));
StepVerifier.create(pollerFlux)
.expectSubscription()
.expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus()
== LongRunningOperationStatus.SUCCESSFULLY_COMPLETED)
.verifyComplete();
Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void ensurePollingForInProgressActivationResponseTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    // Single-slot holder so the deferred lambda below can count activation calls.
    int[] activationCallCount = new int[1];
    activationCallCount[0] = 0;
    // Activation completes but reports IN_PROGRESS, so the flux must keep polling.
    when(activationOperationWithResponse.apply(any())).thenReturn(Mono.defer(() -> {
        activationCallCount[0]++;
        return Mono.just(new PollResponse<Response>(IN_PROGRESS,
            new Response("ActivationDone")));
    }));
    PollerFlux<Response, CertificateOutput> pollerFlux = create(
        Duration.ofSeconds(1),
        activationOperationWithResponse,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // Scripted poll responses: two IN_PROGRESS, one custom non-complete status
    // ("OTHER_1", isComplete=false), then a terminal SUCCESSFULLY_COMPLETED.
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(
        LongRunningOperationStatus.fromString("OTHER_1", false),
        new Response("2"), retryAfter);
    PollResponse<Response> response3 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("3"), retryAfter);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2),
        Mono.just(response3));
    // Each emission's status must mirror the scripted poll responses, in order.
    StepVerifier.create(pollerFlux)
        .expectSubscription()
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response3.getStatus())
        .verifyComplete();
    // Activation must have run exactly once even though polling happened.
    Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void subscribeToActivationOnlyOnceTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    // Two in-progress responses followed by a terminal one.
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), retryAfter);
    // Counts activation invocations across both subscriptions below.
    int[] activationCallCount = new int[1];
    activationCallCount[0] = 0;
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
        activationCallCount[0]++;
        return Mono.just(new Response("ActivationDone"));
    }));
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    // First subscription consumes all three scripted responses.
    StepVerifier.create(pollerFlux)
        .expectSubscription()
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
        .verifyComplete();
    // Re-stub the poll operation and subscribe a second time.
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    StepVerifier.create(pollerFlux)
        .expectSubscription()
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response0.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response1.getStatus())
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == response2.getStatus())
        .verifyComplete();
    // Despite two subscriptions, activation must have run only once.
    Assertions.assertEquals(1, activationCallCount[0]);
}
@Test
public void cancellationCanBeCalledFromOperatorChainTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), retryAfter);
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Capture the arguments the cancel operation receives for later verification.
    final List<Object> cancelParameters = new ArrayList<>();
    when(cancelOperation.apply(any(), any())).thenAnswer((Answer) invocation -> {
        for (Object argument : invocation.getArguments()) {
            cancelParameters.add(argument);
        }
        return Mono.just(new Response("OperationCancelled"));
    });
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    @SuppressWarnings({"rawtypes"})
    final AsyncPollResponse<Response, CertificateOutput>[] secondAsyncResponse = new AsyncPollResponse[1];
    secondAsyncResponse[0] = null;
    // Take the first two poll emissions, then cancel from within the operator chain.
    Response cancelResponse = pollerFlux
        .take(2)
        .last()
        .flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<Response>>) asyncPollResponse -> {
            secondAsyncResponse[0] = asyncPollResponse;
            return asyncPollResponse.cancelOperation();
        }).block();
    Assertions.assertNotNull(cancelResponse);
    Assertions.assertTrue(cancelResponse.getResponse().equalsIgnoreCase("OperationCancelled"));
    Assertions.assertNotNull(secondAsyncResponse[0]);
    Assertions.assertTrue(secondAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("1"));
    Assertions.assertEquals(2, cancelParameters.size());
    // FIX: the equals() calls that used to be here discarded their boolean
    // result, so the test verified nothing about the captured arguments.
    // Assert the parameter types and that the polling context carries the
    // activation value we supplied.
    // NOTE(review): per PollerFlux's contract the cancel BiFunction receives
    // (PollingContext, activation PollResponse) — confirm against the
    // PollerFlux constructor javadoc.
    Assertions.assertTrue(cancelParameters.get(0) instanceof PollingContext);
    Assertions.assertTrue(cancelParameters.get(1) instanceof PollResponse);
    PollingContext<Response> cancelContext = (PollingContext<Response>) cancelParameters.get(0);
    Assertions.assertSame(activationResponse, cancelContext.getActivationResponse().getValue());
}
@Test
public void getResultCanBeCalledFromOperatorChainTest() {
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), retryAfter);
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), retryAfter);
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), retryAfter);
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Capture the arguments passed to the fetch-result operation.
    final List<Object> fetchResultParameters = new ArrayList<>();
    when(fetchResultOperation.apply(any())).thenAnswer((Answer) invocation -> {
        for (Object argument : invocation.getArguments()) {
            fetchResultParameters.add(argument);
        }
        return Mono.just(new CertificateOutput("LROFinalResult"));
    });
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    @SuppressWarnings({"rawtypes"})
    final AsyncPollResponse<Response, CertificateOutput>[] terminalAsyncResponse = new AsyncPollResponse[1];
    terminalAsyncResponse[0] = null;
    // Run until the first terminal response, then request the final result
    // from inside the operator chain.
    CertificateOutput lroResult = pollerFlux
        .takeUntil(apr -> apr.getStatus().isComplete())
        .last()
        .flatMap((Function<AsyncPollResponse<Response, CertificateOutput>, Mono<CertificateOutput>>)
            asyncPollResponse -> {
                terminalAsyncResponse[0] = asyncPollResponse;
                return asyncPollResponse.getFinalResult();
            }).block();
    Assertions.assertNotNull(lroResult);
    Assertions.assertTrue(lroResult.getName().equalsIgnoreCase("LROFinalResult"));
    Assertions.assertNotNull(terminalAsyncResponse[0]);
    Assertions.assertTrue(terminalAsyncResponse[0].getValue().getResponse().equalsIgnoreCase("2"));
    Assertions.assertEquals(1, fetchResultParameters.size());
    Assertions.assertTrue(fetchResultParameters.get(0) instanceof PollingContext);
    PollingContext<Response> pollingContext = (PollingContext<Response>) fetchResultParameters.get(0);
    // FIX: the two equals() calls below previously discarded their result
    // (no-op statements), so nothing was verified; additionally the first
    // compared a PollResponse wrapper against its unwrapped value. Assert on
    // the context's contents instead.
    Assertions.assertSame(activationResponse, pollingContext.getActivationResponse().getValue());
    Assertions.assertEquals(response2.getStatus(), pollingContext.getLatestResponse().getStatus());
    Assertions.assertTrue(pollingContext.getLatestResponse().getValue().getResponse().equalsIgnoreCase("2"));
}
// FIX: the @Test annotation was duplicated; @Test is not a repeatable
// annotation, so the duplicate did not compile.
@Test
public void verifyErrorFromPollingOperation() {
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Poll script: IN_PROGRESS twice, then a terminal FAILED response; the
    // later branches are only reachable if the flux wrongly keeps polling.
    final AtomicReference<Integer> cnt = new AtomicReference<>(0);
    pollOperation = (pollingContext) -> {
        cnt.getAndSet(cnt.get() + 1);
        if (cnt.get() <= 2) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
        } else if (cnt.get() == 3) {
            return Mono.just(new PollResponse<Response>(FAILED, new Response("2")));
        } else if (cnt.get() == 4) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("3")));
        } else {
            return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("4")));
        }
    };
    PollerFlux<Response, CertificateOutput> pollerFlux = new PollerFlux<>(
        Duration.ofSeconds(1),
        activationOperation,
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // The flux must stop emitting once the terminal FAILED status arrives.
    StepVerifier.create(pollerFlux)
        .expectSubscription()
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == IN_PROGRESS)
        .expectNextMatches(asyncPollResponse -> asyncPollResponse.getStatus() == FAILED)
        .verifyComplete();
}
@Test
public void syncPollerConstructorPollIntervalZero() {
    // A zero poll interval is invalid and must be rejected at construction time.
    assertThrows(IllegalArgumentException.class,
        () -> new DefaultSyncPoller<>(Duration.ZERO,
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            pollOperation, cancelOperation, fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNegative() {
    // A negative poll interval is invalid and must be rejected at construction time.
    assertThrows(IllegalArgumentException.class,
        () -> new DefaultSyncPoller<>(Duration.ofSeconds(-1),
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            pollOperation, cancelOperation, fetchResultOperation));
}
@Test
public void syncPollerConstructorPollIntervalNull() {
    // A null poll interval must fail fast with NullPointerException.
    assertThrows(NullPointerException.class,
        () -> new DefaultSyncPoller<>(null,
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            pollOperation, cancelOperation, fetchResultOperation));
}
@Test
public void syncConstructorActivationOperationNull() {
    // A null activation function must fail fast with NullPointerException.
    assertThrows(NullPointerException.class,
        () -> new DefaultSyncPoller<>(Duration.ofSeconds(1), null,
            pollOperation, cancelOperation, fetchResultOperation));
}
@Test
public void syncPollerConstructorPollOperationNull() {
    // A null poll operation must fail fast with NullPointerException.
    assertThrows(NullPointerException.class,
        () -> new DefaultSyncPoller<>(Duration.ofSeconds(1),
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            null, cancelOperation, fetchResultOperation));
}
@Test
public void syncPollerConstructorCancelOperationNull() {
    // A null cancel operation must fail fast with NullPointerException.
    assertThrows(NullPointerException.class,
        () -> new DefaultSyncPoller<>(Duration.ofSeconds(1),
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            pollOperation, null, fetchResultOperation));
}
@Test
public void syncPollerConstructorFetchResultOperationNull() {
    // A null fetch-result operation must fail fast with NullPointerException.
    assertThrows(NullPointerException.class,
        () -> new DefaultSyncPoller<>(Duration.ofSeconds(1),
            context -> {
                Response activated = activationOperation.apply(context).block();
                return new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, activated);
            },
            pollOperation, cancelOperation, null));
}
@Test
public void syncPollerShouldCallActivationFromConstructor() {
    // FIX: use a primitive boolean[] holder instead of Boolean[] — the boxed
    // wrapper adds needless boxing; the single slot exists only so the lambda
    // can mutate an effectively-final variable.
    final boolean[] activationCalled = {false};
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> {
        activationCalled[0] = true;
        return Mono.just(new Response("ActivationDone"));
    }));
    // The poller instance itself is unused (the previous unused local is
    // removed); constructing it must eagerly run the activation operation.
    new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    Assertions.assertTrue(activationCalled[0]);
}
@Test
public void eachPollShouldReceiveLastPollResponse() {
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(new Response("A"))));
    // Each poll builds its value from the previous response's toString() plus
    // "A". Since Response.toString() prefixes "Response: ", the accumulated
    // string below makes the chain of intermediate responses observable.
    when(pollOperation.apply(any())).thenAnswer((Answer) invocation -> {
        Assertions.assertEquals(1, invocation.getArguments().length);
        Assertions.assertTrue(invocation.getArguments()[0] instanceof PollingContext);
        PollingContext<Response> pollingContext = (PollingContext<Response>) invocation.getArguments()[0];
        Assertions.assertTrue(pollingContext.getActivationResponse() instanceof PollResponse);
        Assertions.assertTrue(pollingContext.getLatestResponse() instanceof PollResponse);
        PollResponse<Response> latestResponse = pollingContext.getLatestResponse();
        Assertions.assertNotNull(latestResponse);
        PollResponse<Response> nextResponse = new PollResponse<>(IN_PROGRESS,
            new Response(latestResponse.getValue().toString() + "A"), Duration.ofMillis(100));
        return Mono.just(nextResponse);
    });
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // 1st poll: latest value was the activation Response("A"), whose
    // toString() is "Response: A", so the new value is "Response: AA".
    PollResponse<Response> pollResponse = poller.poll();
    Assertions.assertNotNull(pollResponse);
    Assertions.assertNotNull(pollResponse.getValue().getResponse());
    Assertions.assertTrue(pollResponse.getValue()
        .getResponse()
        .equalsIgnoreCase("Response: AA"));
    // 2nd poll folds the previous toString() in again, nesting the prefix.
    pollResponse = poller.poll();
    Assertions.assertNotNull(pollResponse);
    Assertions.assertNotNull(pollResponse.getValue().getResponse());
    Assertions.assertTrue(pollResponse.getValue()
        .getResponse()
        .equalsIgnoreCase("Response: Response: AAA"));
    // 3rd poll: a third level of nesting, proving every poll saw its
    // predecessor's response via the polling context.
    pollResponse = poller.poll();
    Assertions.assertNotNull(pollResponse);
    Assertions.assertNotNull(pollResponse.getValue().getResponse());
    Assertions.assertTrue(pollResponse.getValue()
        .getResponse()
        .equalsIgnoreCase("Response: Response: Response: AAAA"));
}
@Test
public void waitForCompletionShouldReturnTerminalPollResponse() {
    // Activation result, followed by two in-progress polls and a terminal one.
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    final Duration retryAfter = Duration.ofMillis(100);
    PollResponse<Response> firstPoll = new PollResponse<>(IN_PROGRESS, new Response("0"), retryAfter);
    PollResponse<Response> secondPoll = new PollResponse<>(IN_PROGRESS, new Response("1"), retryAfter);
    PollResponse<Response> terminalPoll = new PollResponse<>(
        LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, new Response("2"), retryAfter);
    when(pollOperation.apply(any()))
        .thenReturn(Mono.just(firstPoll), Mono.just(secondPoll), Mono.just(terminalPoll));
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // waitForCompletion must return the terminal response, not an intermediate one.
    PollResponse<Response> terminalResponse = poller.waitForCompletion();
    Assertions.assertNotNull(terminalResponse.getValue());
    Assertions.assertEquals(terminalPoll.getValue().getResponse(), terminalResponse.getValue().getResponse());
    Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, terminalResponse.getStatus());
}
@Test
public void getResultShouldPollUntilCompletionAndFetchResult() {
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Tracks how many polls ran; starts at -1 so the first call observes 0.
    int[] invocationCount = new int[1];
    invocationCount[0] = -1;
    when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
        invocationCount[0]++;
        switch (invocationCount[0]) {
            case 0:
                return Mono.just(new PollResponse<>(IN_PROGRESS,
                    new Response("0"), Duration.ofMillis(100)));
            case 1:
                return Mono.just(new PollResponse<>(IN_PROGRESS,
                    new Response("1"), Duration.ofMillis(100)));
            case 2:
                return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
                    new Response("2"), Duration.ofMillis(100)));
            default:
                // Terminal response already delivered; any further poll is a bug.
                throw new RuntimeException("Poll should not be called after terminal response");
        }
    });
    when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
        return Mono.just(new CertificateOutput("cert1"));
    }));
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    // getFinalResult() should poll to completion (indices 0..2) then fetch.
    CertificateOutput certificateOutput = poller.getFinalResult();
    Assertions.assertNotNull(certificateOutput);
    Assertions.assertEquals("cert1", certificateOutput.getName());
    // Exactly three polls happened (last observed index is 2).
    Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void getResultShouldNotPollOnCompletedPoller() {
    PollResponse<Response> response0 = new PollResponse<>(IN_PROGRESS,
        new Response("0"), Duration.ofMillis(100));
    PollResponse<Response> response1 = new PollResponse<>(IN_PROGRESS,
        new Response("1"), Duration.ofMillis(100));
    PollResponse<Response> response2 = new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED,
        new Response("2"), Duration.ofMillis(100));
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    when(fetchResultOperation.apply(any())).thenReturn(Mono.defer(() -> {
        return Mono.just(new CertificateOutput("cert1"));
    }));
    when(pollOperation.apply(any())).thenReturn(
        Mono.just(response0),
        Mono.just(response1),
        Mono.just(response2));
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    PollResponse<Response> pollResponse = poller.waitForCompletion();
    Assertions.assertNotNull(pollResponse.getValue());
    Assertions.assertEquals(response2.getValue().getResponse(), pollResponse.getValue().getResponse());
    Assertions.assertEquals(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, pollResponse.getStatus());
    // FIX: this stub previously executed `Assertions.assertTrue(true, ...)`,
    // which can never fail, and its message inverted the test's intent. The
    // poller is already complete, so any further poll is an error — throw,
    // mirroring the sibling tests that guard against post-terminal polling.
    when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
        throw new RuntimeException("Poll should not be called after terminal response");
    });
    CertificateOutput certificateOutput = poller.getFinalResult();
    Assertions.assertNotNull(certificateOutput);
    Assertions.assertEquals("cert1", certificateOutput.getName());
}
@Test
public void waitUntilShouldPollAfterMatchingStatus() {
    final Response activationResponse = new Response("Activated");
    when(activationOperation.apply(any())).thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // Custom, non-complete status that waitUntil() should stop at.
    LongRunningOperationStatus matchStatus
        = LongRunningOperationStatus.fromString("OTHER_1", false);
    // Tracks poll invocations; starts at -1 so the first call observes 0.
    int[] invocationCount = new int[1];
    invocationCount[0] = -1;
    when(pollOperation.apply(any())).thenAnswer((Answer<Mono<PollResponse<Response>>>) invocationOnMock -> {
        invocationCount[0]++;
        switch (invocationCount[0]) {
            case 0:
                return Mono.just(new PollResponse<>(IN_PROGRESS,
                    new Response("0"), Duration.ofMillis(100)));
            case 1:
                return Mono.just(new PollResponse<>(IN_PROGRESS,
                    new Response("1"), Duration.ofMillis(100)));
            case 2:
                return Mono.just(new PollResponse<>(matchStatus,
                    new Response("1"), Duration.ofMillis(100)));
            default:
                // waitUntil must stop polling once the status matched.
                throw new RuntimeException("Poll should not be called after matching response");
        }
    });
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    PollResponse<Response> pollResponse = poller.waitUntil(matchStatus);
    Assertions.assertEquals(matchStatus, pollResponse.getStatus());
    // Exactly three polls (indices 0..2) should have happened.
    Assertions.assertEquals(2, invocationCount[0]);
}
@Test
public void verifyExceptionPropagationFromPollingOperationSyncPoller() {
    final Response activationResponse = new Response("Foo");
    when(activationOperation.apply(any()))
        .thenReturn(Mono.defer(() -> Mono.just(activationResponse)));
    // The third poll throws synchronously; getFinalResult() must surface it.
    final AtomicReference<Integer> cnt = new AtomicReference<>(0);
    pollOperation = (pollingContext) -> {
        cnt.getAndSet(cnt.get() + 1);
        if (cnt.get() <= 2) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("1")));
        } else if (cnt.get() == 3) {
            throw new RuntimeException("Polling operation failed!");
        } else if (cnt.get() == 4) {
            return Mono.just(new PollResponse<Response>(IN_PROGRESS, new Response("2")));
        } else {
            return Mono.just(new PollResponse<Response>(SUCCESSFULLY_COMPLETED, new Response("3")));
        }
    };
    SyncPoller<Response, CertificateOutput> poller = new DefaultSyncPoller<>(
        Duration.ofSeconds(1),
        cxt -> new PollResponse<>(LongRunningOperationStatus.NOT_STARTED,
            activationOperation.apply(cxt).block()),
        pollOperation,
        cancelOperation,
        fetchResultOperation);
    RuntimeException exception = assertThrows(RuntimeException.class,
        () -> poller.getFinalResult());
    // FIX: JUnit's assertEquals takes (expected, actual); the arguments were
    // swapped, which yields a misleading message on failure.
    Assertions.assertEquals("Polling operation failed!", exception.getMessage());
}
@Test
public void testPollerFluxError() throws InterruptedException {
    // An errored PollerFlux must signal onError with the exact same exception
    // instance, never onNext or onComplete.
    final IllegalArgumentException expected = new IllegalArgumentException();
    final CountDownLatch errorSeen = new CountDownLatch(1);
    PollerFlux<String, String> erroredFlux = error(expected);
    erroredFlux.subscribe(
        response -> Assertions.fail("Did not expect a response"),
        throwable -> {
            errorSeen.countDown();
            Assertions.assertSame(expected, throwable);
        },
        () -> Assertions.fail("Did not expect the flux to complete"));
    // The error must arrive promptly; await guards against a hung subscription.
    Assertions.assertTrue(errorSeen.await(1, TimeUnit.SECONDS));
}
@Test
public void testSyncPollerError() {
    // getSyncPoller() eagerly runs activation, so the error surfaces immediately.
    PollerFlux<String, String> erroredFlux = error(new IllegalArgumentException());
    Assertions.assertThrows(IllegalArgumentException.class, erroredFlux::getSyncPoller);
}
/**
 * Test payload type used as the activation/poll value throughout these tests.
 * equals/hashCode are intentionally not overridden (status comparisons above
 * use identity), and toString() prefixes "Response: " — the string-accumulation
 * assertions in eachPollShouldReceiveLastPollResponse rely on that prefix.
 */
public static class Response {
    // Immutable payload string supplied at construction.
    private final String response;

    public Response(String response) {
        this.response = response;
    }

    public String getResponse() {
        return response;
    }

    @Override
    public String toString() {
        return "Response: " + response;
    }
}
/**
 * Simple result type representing the final output of the long-running
 * operation under test.
 * FIX: the name field was package-private and mutable; it is now private and
 * final (callers only ever use the constructor and getName(), so the external
 * interface is unchanged).
 * NOTE(review): unlike the sibling Response class this one is a non-static
 * inner class and so holds a hidden reference to the enclosing test instance;
 * consider making it static for consistency.
 */
public class CertificateOutput {
    private final String name;

    public CertificateOutput(String certName) {
        name = certName;
    }

    public String getName() {
        return name;
    }
}
} |
why do we need this `buffer(2)` ? | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.find(query, type, container)
.buffer(2)
.map((vals) -> {
if (vals.size() > 1) {
throw new CosmosAccessException("Too many results - Expected Mono<"
+ returnedType.getReturnedType()
+ "> but query returned multiple results");
}
return vals.iterator().next();
});
} | .buffer(2) | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.getContainerName(type);
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} |
We want to ensure only a single result is returned by the query. However, the results are returned as a stream. This allows us to check that the stream only contains a single item without retrieving all of the streams contents. | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.find(query, type, container)
.buffer(2)
.map((vals) -> {
if (vals.size() > 1) {
throw new CosmosAccessException("Too many results - Expected Mono<"
+ returnedType.getReturnedType()
+ "> but query returned multiple results");
}
return vals.iterator().next();
});
} | .buffer(2) | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.getContainerName(type);
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} |
Sounds good! | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.find(query, type, container)
.buffer(2)
.map((vals) -> {
if (vals.size() > 1) {
throw new CosmosAccessException("Too many results - Expected Mono<"
+ returnedType.getReturnedType()
+ "> but query returned multiple results");
}
return vals.iterator().next();
});
} | .buffer(2) | public Object execute(DocumentQuery query, Class<?> type, String container) {
return operations.getContainerName(type);
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} | class ContainerExecution implements ReactiveCosmosQueryExecution {
private final ReactiveCosmosOperations operations;
public ContainerExecution(ReactiveCosmosOperations operations) {
this.operations = operations;
}
@Override
} |
Do we need to consider (now or later) extending the pattern-matching algorithm to cover standard java networking properties formats? Reactor-netty claims to cover those cases [here](https://github.com/reactor/reactor-netty/blob/a07a8b95488ff2960ea551f4f205d3c7e6277734/src/main/java/reactor/netty/transport/ProxyProvider.java#L323). | private boolean shouldApplyProxy(SocketAddress socketAddress) {
if (nonProxyHostsPattern == null) {
return true;
}
if (!(socketAddress instanceof InetSocketAddress)) {
return true;
}
InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches();
} | return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches(); | private boolean shouldApplyProxy(SocketAddress socketAddress) {
if (nonProxyHostsPattern == null) {
return true;
}
if (!(socketAddress instanceof InetSocketAddress)) {
return true;
}
InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches();
} | class DeferredHttpProxyProvider implements Function<Bootstrap, BiConsumer<ConnectionObserver, Channel>> {
private final AuthorizationChallengeHandler challengeHandler;
private final AtomicReference<ChallengeHolder> proxyChallengeHolderReference;
private final InetSocketAddress proxyAddress;
private final String username;
private final String password;
private final String nonProxyHosts;
private final Pattern nonProxyHostsPattern;
public DeferredHttpProxyProvider(AuthorizationChallengeHandler challengeHandler,
AtomicReference<ChallengeHolder> proxyChallengeHolderReference, ProxyOptions proxyOptions) {
this.challengeHandler = challengeHandler;
this.proxyChallengeHolderReference = proxyChallengeHolderReference;
this.proxyAddress = proxyOptions.getAddress();
this.username = proxyOptions.getUsername();
this.password = proxyOptions.getPassword();
this.nonProxyHosts = proxyOptions.getNonProxyHosts();
this.nonProxyHostsPattern = (nonProxyHosts == null)
? null
: Pattern.compile(nonProxyHosts, Pattern.CASE_INSENSITIVE);
}
@Override
public BiConsumer<ConnectionObserver, Channel> apply(Bootstrap bootstrap) {
return ((connectionObserver, channel) -> {
if (shouldApplyProxy(bootstrap.config().remoteAddress())) {
channel.pipeline()
.addFirst(NettyPipeline.ProxyHandler, new HttpProxyHandler(proxyAddress, challengeHandler,
proxyChallengeHolderReference))
.addLast("azure.proxy.exceptionHandler", new HttpProxyExceptionHandler());
}
});
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof DeferredHttpProxyProvider)) {
return false;
}
DeferredHttpProxyProvider other = (DeferredHttpProxyProvider) o;
return Objects.equals(username, other.username)
&& Objects.equals(password, other.password)
&& Objects.equals(proxyAddress, other.proxyAddress)
&& Objects.equals(nonProxyHosts, other.nonProxyHosts);
}
@Override
public int hashCode() {
return Objects.hash(proxyAddress, password, proxyAddress, nonProxyHosts);
}
} | class DeferredHttpProxyProvider implements Function<Bootstrap, BiConsumer<ConnectionObserver, Channel>> {
private final AuthorizationChallengeHandler challengeHandler;
private final AtomicReference<ChallengeHolder> proxyChallengeHolderReference;
private final InetSocketAddress proxyAddress;
private final String username;
private final String password;
private final String nonProxyHosts;
private final Pattern nonProxyHostsPattern;
public DeferredHttpProxyProvider(AuthorizationChallengeHandler challengeHandler,
AtomicReference<ChallengeHolder> proxyChallengeHolderReference, ProxyOptions proxyOptions) {
this.challengeHandler = challengeHandler;
this.proxyChallengeHolderReference = proxyChallengeHolderReference;
this.proxyAddress = proxyOptions.getAddress();
this.username = proxyOptions.getUsername();
this.password = proxyOptions.getPassword();
this.nonProxyHosts = proxyOptions.getNonProxyHosts();
this.nonProxyHostsPattern = (nonProxyHosts == null)
? null
: Pattern.compile(nonProxyHosts, Pattern.CASE_INSENSITIVE);
}
@Override
public BiConsumer<ConnectionObserver, Channel> apply(Bootstrap bootstrap) {
return ((connectionObserver, channel) -> {
if (shouldApplyProxy(bootstrap.config().remoteAddress())) {
channel.pipeline()
.addFirst(NettyPipeline.ProxyHandler, new HttpProxyHandler(proxyAddress, challengeHandler,
proxyChallengeHolderReference))
.addLast("azure.proxy.exceptionHandler", new HttpProxyExceptionHandler());
}
});
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof DeferredHttpProxyProvider)) {
return false;
}
DeferredHttpProxyProvider other = (DeferredHttpProxyProvider) o;
return Objects.equals(username, other.username)
&& Objects.equals(password, other.password)
&& Objects.equals(proxyAddress, other.proxyAddress)
&& Objects.equals(nonProxyHosts, other.nonProxyHosts);
}
@Override
public int hashCode() {
return Objects.hash(proxyAddress, password, proxyAddress, nonProxyHosts);
}
} |
There is more work that is required around non proxy hosts, should roll that into this issue: https://github.com/Azure/azure-sdk-for-java/issues/7795 | private boolean shouldApplyProxy(SocketAddress socketAddress) {
if (nonProxyHostsPattern == null) {
return true;
}
if (!(socketAddress instanceof InetSocketAddress)) {
return true;
}
InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches();
} | return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches(); | private boolean shouldApplyProxy(SocketAddress socketAddress) {
if (nonProxyHostsPattern == null) {
return true;
}
if (!(socketAddress instanceof InetSocketAddress)) {
return true;
}
InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
return !nonProxyHostsPattern.matcher(inetSocketAddress.getHostName()).matches();
} | class DeferredHttpProxyProvider implements Function<Bootstrap, BiConsumer<ConnectionObserver, Channel>> {
private final AuthorizationChallengeHandler challengeHandler;
private final AtomicReference<ChallengeHolder> proxyChallengeHolderReference;
private final InetSocketAddress proxyAddress;
private final String username;
private final String password;
private final String nonProxyHosts;
private final Pattern nonProxyHostsPattern;
/**
* Creates a provider that defers installing the HTTP proxy handler until each
* channel is initialized, capturing the proxy configuration eagerly here.
*
* @param challengeHandler handles proxy authorization challenges.
* @param proxyChallengeHolderReference shared holder for the proxy challenge state.
* @param proxyOptions source of the proxy address, credentials and non-proxy hosts.
*/
public DeferredHttpProxyProvider(AuthorizationChallengeHandler challengeHandler,
AtomicReference<ChallengeHolder> proxyChallengeHolderReference, ProxyOptions proxyOptions) {
this.challengeHandler = challengeHandler;
this.proxyChallengeHolderReference = proxyChallengeHolderReference;
this.proxyAddress = proxyOptions.getAddress();
this.username = proxyOptions.getUsername();
this.password = proxyOptions.getPassword();
this.nonProxyHosts = proxyOptions.getNonProxyHosts();
// Compile the non-proxy-hosts pattern once up front; null means "proxy every host".
// NOTE(review): the raw string is used as a regex directly — presumably escaping
// is handled upstream; confirm (see azure-sdk-for-java issue #7795).
this.nonProxyHostsPattern = (nonProxyHosts == null)
? null
: Pattern.compile(nonProxyHosts, Pattern.CASE_INSENSITIVE);
}
@Override
public BiConsumer<ConnectionObserver, Channel> apply(Bootstrap bootstrap) {
// Returns a per-connection callback that conditionally wires the proxy handler
// into the channel pipeline, skipping hosts matched by the non-proxy pattern.
return ((connectionObserver, channel) -> {
if (shouldApplyProxy(bootstrap.config().remoteAddress())) {
channel.pipeline()
// addFirst: the proxy handler is placed at the head of the pipeline,
// presumably so the proxy handshake precedes all other handlers.
.addFirst(NettyPipeline.ProxyHandler, new HttpProxyHandler(proxyAddress, challengeHandler,
proxyChallengeHolderReference))
// Custom handler appended to surface proxy failures as exceptions.
.addLast("azure.proxy.exceptionHandler", new HttpProxyExceptionHandler());
}
});
}
@Override
public boolean equals(Object o) {
// Reflexive shortcut: an instance always equals itself.
if (this == o) {
return true;
}
// instanceof also filters out null, so no separate null check is needed.
if (!(o instanceof DeferredHttpProxyProvider)) {
return false;
}
DeferredHttpProxyProvider other = (DeferredHttpProxyProvider) o;
// Equal when proxy endpoint, credentials and non-proxy-host config all match.
return Objects.equals(username, other.username)
&& Objects.equals(password, other.password)
&& Objects.equals(proxyAddress, other.proxyAddress)
&& Objects.equals(nonProxyHosts, other.nonProxyHosts);
}
@Override
public int hashCode() {
// Keep consistent with equals(), which compares username, password, proxyAddress
// and nonProxyHosts. The previous version hashed proxyAddress twice and never
// hashed username (an apparent copy/paste slip): equal-objects-equal-hash still
// held, but providers differing only by username always collided.
return Objects.hash(proxyAddress, username, password, nonProxyHosts);
}
} | class DeferredHttpProxyProvider implements Function<Bootstrap, BiConsumer<ConnectionObserver, Channel>> {
private final AuthorizationChallengeHandler challengeHandler;
private final AtomicReference<ChallengeHolder> proxyChallengeHolderReference;
private final InetSocketAddress proxyAddress;
private final String username;
private final String password;
private final String nonProxyHosts;
private final Pattern nonProxyHostsPattern;
/**
* Creates a provider that defers installing the HTTP proxy handler until each
* channel is initialized, capturing the proxy configuration eagerly here.
*
* @param challengeHandler handles proxy authorization challenges.
* @param proxyChallengeHolderReference shared holder for the proxy challenge state.
* @param proxyOptions source of the proxy address, credentials and non-proxy hosts.
*/
public DeferredHttpProxyProvider(AuthorizationChallengeHandler challengeHandler,
AtomicReference<ChallengeHolder> proxyChallengeHolderReference, ProxyOptions proxyOptions) {
this.challengeHandler = challengeHandler;
this.proxyChallengeHolderReference = proxyChallengeHolderReference;
this.proxyAddress = proxyOptions.getAddress();
this.username = proxyOptions.getUsername();
this.password = proxyOptions.getPassword();
this.nonProxyHosts = proxyOptions.getNonProxyHosts();
// Compile the non-proxy-hosts pattern once up front; null means "proxy every host".
// NOTE(review): the raw string is used as a regex directly — presumably escaping
// is handled upstream; confirm (see azure-sdk-for-java issue #7795).
this.nonProxyHostsPattern = (nonProxyHosts == null)
? null
: Pattern.compile(nonProxyHosts, Pattern.CASE_INSENSITIVE);
}
@Override
public BiConsumer<ConnectionObserver, Channel> apply(Bootstrap bootstrap) {
// Returns a per-connection callback that conditionally wires the proxy handler
// into the channel pipeline, skipping hosts matched by the non-proxy pattern.
return ((connectionObserver, channel) -> {
if (shouldApplyProxy(bootstrap.config().remoteAddress())) {
channel.pipeline()
// addFirst: the proxy handler is placed at the head of the pipeline,
// presumably so the proxy handshake precedes all other handlers.
.addFirst(NettyPipeline.ProxyHandler, new HttpProxyHandler(proxyAddress, challengeHandler,
proxyChallengeHolderReference))
// Custom handler appended to surface proxy failures as exceptions.
.addLast("azure.proxy.exceptionHandler", new HttpProxyExceptionHandler());
}
});
}
@Override
public boolean equals(Object o) {
// Reflexive shortcut: an instance always equals itself.
if (this == o) {
return true;
}
// instanceof also filters out null, so no separate null check is needed.
if (!(o instanceof DeferredHttpProxyProvider)) {
return false;
}
DeferredHttpProxyProvider other = (DeferredHttpProxyProvider) o;
// Equal when proxy endpoint, credentials and non-proxy-host config all match.
return Objects.equals(username, other.username)
&& Objects.equals(password, other.password)
&& Objects.equals(proxyAddress, other.proxyAddress)
&& Objects.equals(nonProxyHosts, other.nonProxyHosts);
}
@Override
public int hashCode() {
// Keep consistent with equals(), which compares username, password, proxyAddress
// and nonProxyHosts. The previous version hashed proxyAddress twice and never
// hashed username (an apparent copy/paste slip): equal-objects-equal-hash still
// held, but providers differing only by username always collided.
return Objects.hash(proxyAddress, username, password, nonProxyHosts);
}
} |
Can we add a unit test for this in the `CosmosEntityInformationUnitTest.java` class? | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | && idField.getType() != int.class | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} |
Added in CosmosEntityInformationUnitTest, please check | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | && idField.getType() != int.class | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} |
Thanks. | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | && idField.getType() != int.class | private Field getIdField(Class<?> domainType) {
final Field idField;
final List<Field> fields = FieldUtils.getFieldsListWithAnnotation(domainType, Id.class);
if (fields.isEmpty()) {
idField = ReflectionUtils.findField(getJavaType(), Constants.ID_PROPERTY_NAME);
} else if (fields.size() == 1) {
idField = fields.get(0);
} else {
throw new IllegalArgumentException("only one field with @Id annotation!");
}
if (idField == null) {
throw new IllegalArgumentException("domain should contain @Id field or field named id");
} else if (idField.getType() != String.class
&& idField.getType() != Integer.class
&& idField.getType() != int.class
&& idField.getType() != Long.class
&& idField.getType() != long.class) {
throw new IllegalArgumentException("type of id field must be String, Integer or Long");
}
return idField;
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} | class of id type
*/
@SuppressWarnings("unchecked")
public Class<ID> getIdType() {
return (Class<ID>) id.getType();
} |
It would be good to add some samples that show what Context is used for. | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | .setPollInterval(Duration.ofSeconds(5)), Context.NONE) | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
I was thinking of holding off on this change until we have the underlying implementation for full context-passing support. | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | .setPollInterval(Duration.ofSeconds(5)), Context.NONE) | public static void main(String[] args) throws IOException {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
List<RecognizedForm> formsWithLabeledModel =
client.beginRecognizeCustomForms(
new FileInputStream(analyzeFile), analyzeFile.length(),
"{labeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
List<RecognizedForm> formsWithUnlabeledModel =
client.beginRecognizeCustomForms(new FileInputStream(analyzeFile), analyzeFile.length(),
"{unlabeled_model_Id}",
new RecognizeOptions()
.setContentType(FormContentType.APPLICATION_PDF)
.setIncludeFieldElements(true)
.setPollInterval(Duration.ofSeconds(5)), Context.NONE)
.getFinalResult();
System.out.println("--------Recognizing forms with labeled custom model--------");
formsWithLabeledModel.forEach(labeledForm -> labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "MerchantName".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
}));
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
formsWithUnlabeledModel.forEach(unLabeledForm -> unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueData().getBoundingBox() != null) {
formField.getValueData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
if (formField.getLabelData() != null && formField.getLabelData().getBoundingBox() != null) {
formField.getLabelData().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelData().getText(), "", formField.getConfidence());
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getValue(), formField.getValueData().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelData().getText()))
.findAny()
.ifPresent(formFieldEntry -> System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
}));
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledData {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} |
I think these examples need to be more concise, especially since they get injected into Javadocs; it can make the Javadocs pretty verbose. | public void analyzeSentimentWithLanguageWithOpinionMining() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", true, "en");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
List<AspectSentiment> positiveAspects = new ArrayList<>();
List<AspectSentiment> mixedAspects = new ArrayList<>();
List<AspectSentiment> negativeAspects = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
sentenceSentiment.getAspects().forEach(aspectSentiment -> {
TextSentiment aspectTextSentiment = aspectSentiment.getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeAspects.add(aspectSentiment);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveAspects.add(aspectSentiment);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedAspects.add(aspectSentiment);
}
});
}
System.out.printf("Positive aspects count: %d%n", positiveAspects.size());
for (AspectSentiment positiveAspect : positiveAspects) {
System.out.printf("\tAspect: %s%n", positiveAspect.getText());
for (OpinionSentiment opinionSentiment : positiveAspect.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Mixed aspects count: %d%n", mixedAspects.size());
for (AspectSentiment mixedAspect : mixedAspects) {
System.out.printf("\tAspect: %s%n", mixedAspect.getText());
for (OpinionSentiment opinionSentiment : mixedAspect.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Negative aspects count: %d%n", negativeAspects.size());
for (AspectSentiment negativeAspect : negativeAspects) {
System.out.printf("\tAspect: %s%n", negativeAspect.getText());
for (OpinionSentiment opinionSentiment : negativeAspect.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
} | final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment( | public void analyzeSentimentWithLanguageWithOpinionMining() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
}
} | class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
// Build an HTTP pipeline; the reader supplies their own policies in place
// of the placeholder comment.
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
// Construct a synchronous client using the custom pipeline. "{key}" and
// "{endpoint}" are placeholders the reader substitutes with real values.
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
// Assemble the builder first, then materialize the synchronous client.
// "{key}" and "{endpoint}" are placeholders the reader substitutes.
TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}");
TextAnalyticsClient client = builder.buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguage() {
// Detect the dominant language of a single document and print its name,
// ISO 639-1 code and confidence score.
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageWithCountryHint() {
// Same as detectLanguage, but passes "US" as a country hint to bias detection.
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(
"This text is in English", "US");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
// Batch-detect languages with a shared "US" country hint; the null argument
// is the request options (defaults apply).
DetectLanguageResultCollection resultCollection =
textAnalyticsClient.detectLanguageBatch(documents, "US", null);
// Batch-level statistics for the whole request.
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
// Per-document results: print each document's id and its primary language.
resultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectBatchLanguagesMaxOverload() {
    // Each input carries an explicit ID and a per-document country hint.
    List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
    );
    // Ask the service to return batch statistics with the results.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<DetectLanguageResultCollection> response =
        textAnalyticsClient.detectLanguageBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    DetectLanguageResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
            + " valid document count = %s.%n",
        stats.getDocumentCount(), stats.getInvalidDocumentCount(),
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DetectedLanguage language = result.getPrimaryLanguage();
        System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore());
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}.
 */
public void recognizeEntities() {
    // Recognize categorized entities in a single document and print each one.
    textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft")
        .forEach(entity -> System.out.printf(
            "Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String, String)}.
 */
public void recognizeEntitiesWithLanguage() {
    // Same as above, with an explicit "en" language hint.
    textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en")
        .forEach(entity -> System.out.printf(
            "Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesBatch} with a list of strings.
 */
public void recognizeEntitiesStringListWithOptions() {
    // Batch entity recognition over plain strings; null uses default request options.
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    RecognizeEntitiesResultCollection results =
        textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> result.getEntities().forEach(entity ->
        System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesBatchWithResponse}.
 */
public void recognizeBatchEntitiesMaxOverload() {
    // Inputs carry an explicit ID and a per-document language.
    List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<RecognizeEntitiesResultCollection> response =
        textAnalyticsClient.recognizeEntitiesBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    RecognizeEntitiesResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> result.getEntities().forEach(entity ->
        System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String)}.
 */
public void recognizeLinkedEntities() {
    // Recognize entities linked to a well-known knowledge base (e.g. Wikipedia).
    System.out.println("Linked Entities:");
    textAnalyticsClient.recognizeLinkedEntities("Old Faithful is a geyser at Yellowstone Park.")
        .forEach(entity -> {
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(), entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        });
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntities(String, String)}.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    // Same as the single-argument overload, with an explicit "en" language hint.
    textAnalyticsClient.recognizeLinkedEntities("Old Faithful is a geyser at Yellowstone Park.", "en")
        .forEach(entity -> {
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(), entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        });
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesBatch} with a list of strings.
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    // Batch linked-entity recognition over plain strings; null uses default options.
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    RecognizeLinkedEntitiesResultCollection results =
        textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> result.getEntities().forEach(entity -> {
        System.out.println("Linked Entities:");
        System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
            entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(), entity.getDataSource());
        entity.getMatches().forEach(match -> System.out.printf(
            "Matched entity: %s, confidence score: %f.%n",
            match.getText(), match.getConfidenceScore()));
    }));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeLinkedEntitiesBatchWithResponse}.
 */
public void recognizeLinkedEntitiesBatchMaxOverload() {
    List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
    );
    // Request batch statistics with the response.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<RecognizeLinkedEntitiesResultCollection> response =
        textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    RecognizeLinkedEntitiesResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> result.getEntities().forEach(entity -> {
        System.out.println("Linked Entities:");
        System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
            entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(), entity.getDataSource());
        entity.getMatches().forEach(match -> System.out.printf(
            "Matched entity: %s, confidence score: %.2f.%n",
            match.getText(), match.getConfidenceScore()));
    }));
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String)}.
 */
public void extractKeyPhrases() {
    // Print each key phrase extracted from a single document.
    System.out.println("Extracted phrases:");
    textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")
        .forEach(phrase -> System.out.printf("%s.%n", phrase));
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String, String)}.
 */
public void extractKeyPhrasesWithLanguage() {
    // Extract key phrases from a single document with an explicit "en" language hint.
    System.out.println("Extracted phrases:");
    // Fixed: lambda parameter was misspelled "kegPhrase".
    textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")
        .forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesBatch} with a list of strings.
 */
public void extractKeyPhrasesStringListWithOptions() {
    // Batch key-phrase extraction over plain strings; null uses default options.
    List<String> documents = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    ExtractKeyPhrasesResultCollection results =
        textAnalyticsClient.extractKeyPhrasesBatch(documents, "en", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        System.out.println("Extracted phrases:");
        result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesBatchWithResponse}.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<ExtractKeyPhrasesResultCollection> response =
        textAnalyticsClient.extractKeyPhrasesBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    ExtractKeyPhrasesResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        System.out.println("Extracted phrases:");
        result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String)}.
 */
public void analyzeSentiment() {
    // Document-level sentiment plus a breakdown per sentence.
    DocumentSentiment docSentiment =
        textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
    System.out.printf(
        "Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        docSentiment.getSentiment(),
        docSentiment.getConfidenceScores().getPositive(),
        docSentiment.getConfidenceScores().getNeutral(),
        docSentiment.getConfidenceScores().getNegative());
    docSentiment.getSentences().forEach(sentence -> System.out.printf(
        "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        sentence.getSentiment(),
        sentence.getConfidenceScores().getPositive(),
        sentence.getConfidenceScores().getNeutral(),
        sentence.getConfidenceScores().getNegative()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String, String)}.
 */
public void analyzeSentimentWithLanguage() {
    // Same as the single-argument overload, with an explicit "en" language hint.
    DocumentSentiment docSentiment = textAnalyticsClient.analyzeSentiment(
        "The hotel was dark and unclean.", "en");
    System.out.printf(
        "Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        docSentiment.getSentiment(),
        docSentiment.getConfidenceScores().getPositive(),
        docSentiment.getConfidenceScores().getNeutral(),
        docSentiment.getConfidenceScores().getNegative());
    docSentiment.getSentences().forEach(sentence -> System.out.printf(
        "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        sentence.getSentiment(),
        sentence.getConfidenceScores().getPositive(),
        sentence.getConfidenceScores().getNeutral(),
        sentence.getConfidenceScores().getNegative()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatch} with a list of strings.
 */
public void analyzeSentimentStringListWithOptions() {
    // Two documents mentioning the same hotel and restaurant in opposite order.
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    AnalyzeSentimentResultCollection results =
        textAnalyticsClient.analyzeSentimentBatch(documents, "en", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    // Document- and sentence-level sentiment for each result.
    results.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DocumentSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            docSentiment.getSentiment(),
            docSentiment.getConfidenceScores().getPositive(),
            docSentiment.getConfidenceScores().getNeutral(),
            docSentiment.getConfidenceScores().getNegative());
        docSentiment.getSentences().forEach(sentence -> System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatch} with opinion mining enabled.
 */
// Demonstrates sentiment analysis with opinion mining: sentences are bucketed by
// aspect sentiment and printed with their supporting opinions.
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    // The boolean argument enables opinion mining; null uses default request options.
    AnalyzeSentimentResultCollection resultCollection =
        textAnalyticsClient.analyzeSentimentBatch(documents, true, "en", null);
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(analyzeSentimentResult -> {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            documentSentiment.getSentiment(),
            documentSentiment.getConfidenceScores().getPositive(),
            documentSentiment.getConfidenceScores().getNeutral(),
            documentSentiment.getConfidenceScores().getNegative());
        // Collect aspects across all sentences, grouped by their sentiment label.
        List<AspectSentiment> positiveAspects = new ArrayList<>();
        List<AspectSentiment> mixedAspects = new ArrayList<>();
        List<AspectSentiment> negativeAspects = new ArrayList<>();
        documentSentiment.getSentences().forEach(sentenceSentiment -> {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                    + " negative score: %.2f.%n",
                sentenceSentiment.getSentiment(),
                sentenceSentiment.getConfidenceScores().getPositive(),
                sentenceSentiment.getConfidenceScores().getNeutral(),
                sentenceSentiment.getConfidenceScores().getNegative());
            sentenceSentiment.getAspects().forEach(aspectSentiment -> {
                TextSentiment aspectTextSentiment = aspectSentiment.getSentiment();
                if (NEGATIVE.equals(aspectTextSentiment)) {
                    negativeAspects.add(aspectSentiment);
                } else if (POSITIVE.equals(aspectTextSentiment)) {
                    positiveAspects.add(aspectSentiment);
                } else if (MIXED.equals(aspectTextSentiment)) {
                    mixedAspects.add(aspectSentiment);
                }
            });
        });
        // Print each bucket with the opinions that justify the aspect's sentiment.
        System.out.printf("Positive aspects count: %d%n", positiveAspects.size());
        for (AspectSentiment positiveAspect : positiveAspects) {
            System.out.printf("\tAspect: %s%n", positiveAspect.getText());
            for (OpinionSentiment opinionSentiment : positiveAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
        System.out.printf("Mixed aspects count: %d%n", mixedAspects.size());
        for (AspectSentiment mixedAspect : mixedAspects) {
            System.out.printf("\tAspect: %s%n", mixedAspect.getText());
            for (OpinionSentiment opinionSentiment : mixedAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
        System.out.printf("Negative aspects count: %d%n", negativeAspects.size());
        for (AspectSentiment negativeAspect : negativeAspects) {
            System.out.printf("\tAspect: %s%n", negativeAspect.getText());
            for (OpinionSentiment opinionSentiment : negativeAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatchWithResponse}.
 */
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
            .setLanguage("en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
            .setLanguage("en")
    );
    // Request batch statistics with the response.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<AnalyzeSentimentResultCollection> response =
        textAnalyticsClient.analyzeSentimentBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    AnalyzeSentimentResultCollection results = response.getValue();
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DocumentSentiment docSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                + "negative score: %.2f.%n",
            docSentiment.getSentiment(),
            docSentiment.getConfidenceScores().getPositive(),
            docSentiment.getConfidenceScores().getNeutral(),
            docSentiment.getConfidenceScores().getNegative());
        docSentiment.getSentences().forEach(sentence -> System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatchWithResponse} with opinion mining enabled.
 */
// Max overload: batch sentiment analysis with opinion mining, statistics, and an
// explicit Context. Aspects are bucketed by sentiment and printed with opinions.
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
            .setLanguage("en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
            .setLanguage("en")
    );
    // The boolean argument enables opinion mining.
    Response<AnalyzeSentimentResultCollection> response =
        textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs, true,
            new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    AnalyzeSentimentResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(analyzeSentimentResult -> {
        System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
        DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                + "negative score: %.2f.%n",
            documentSentiment.getSentiment(),
            documentSentiment.getConfidenceScores().getPositive(),
            documentSentiment.getConfidenceScores().getNeutral(),
            documentSentiment.getConfidenceScores().getNegative());
        // Collect aspects across all sentences, grouped by their sentiment label.
        List<AspectSentiment> positiveAspects = new ArrayList<>();
        List<AspectSentiment> mixedAspects = new ArrayList<>();
        List<AspectSentiment> negativeAspects = new ArrayList<>();
        documentSentiment.getSentences().forEach(sentenceSentiment -> {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                    + " negative score: %.2f.%n",
                sentenceSentiment.getSentiment(),
                sentenceSentiment.getConfidenceScores().getPositive(),
                sentenceSentiment.getConfidenceScores().getNeutral(),
                sentenceSentiment.getConfidenceScores().getNegative());
            sentenceSentiment.getAspects().forEach(aspectSentiment -> {
                TextSentiment aspectTextSentiment = aspectSentiment.getSentiment();
                if (NEGATIVE.equals(aspectTextSentiment)) {
                    negativeAspects.add(aspectSentiment);
                } else if (POSITIVE.equals(aspectTextSentiment)) {
                    positiveAspects.add(aspectSentiment);
                } else if (MIXED.equals(aspectTextSentiment)) {
                    mixedAspects.add(aspectSentiment);
                }
            });
        });
        // Print each bucket with the opinions that justify the aspect's sentiment.
        System.out.printf("Positive aspects count: %d%n", positiveAspects.size());
        for (AspectSentiment positiveAspect : positiveAspects) {
            System.out.printf("\tAspect: %s%n", positiveAspect.getText());
            for (OpinionSentiment opinionSentiment : positiveAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
        System.out.printf("Mixed aspects count: %d%n", mixedAspects.size());
        for (AspectSentiment mixedAspect : mixedAspects) {
            System.out.printf("\tAspect: %s%n", mixedAspect.getText());
            for (OpinionSentiment opinionSentiment : mixedAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
        System.out.printf("Negative aspects count: %d%n", negativeAspects.size());
        for (AspectSentiment negativeAspect : negativeAspects) {
            System.out.printf("\tAspect: %s%n", negativeAspect.getText());
            for (OpinionSentiment opinionSentiment : negativeAspect.getOpinions()) {
                System.out.printf("\t\t'%s' sentiment because of \"%s\". Does the aspect negated: %s.%n",
                    opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
            }
        }
    });
}
}

class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
    // Custom policy chain; add retry/logging/etc. policies as needed.
    HttpPipeline httpPipeline = new HttpPipelineBuilder()
        .policies(/* add policies */)
        .build();
    TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .pipeline(httpPipeline);
    TextAnalyticsClient textAnalyticsClient = builder.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
    // Endpoint and key are both required before buildClient(); setter order is irrelevant.
    TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
        .endpoint("{endpoint}")
        .credential(new AzureKeyCredential("{key}"))
        .buildClient();
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguage(String)}.
 */
public void detectLanguage() {
    // Detect the primary language of a single document.
    final DetectedLanguage language = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
    System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
        language.getName(), language.getIso6391Name(), language.getConfidenceScore());
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguage(String, String)}.
 */
public void detectLanguageWithCountryHint() {
    // The second argument is a country hint (ISO 3166-1 alpha-2 code).
    final DetectedLanguage language =
        textAnalyticsClient.detectLanguage("This text is in English", "US");
    System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
        language.getName(), language.getIso6391Name(), language.getConfidenceScore());
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguageBatch} with a list of strings.
 */
public void detectLanguageStringListWithOptions() {
    // Batch detection over plain strings, sharing one country hint; null uses default options.
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    DetectLanguageResultCollection results =
        textAnalyticsClient.detectLanguageBatch(documents, "US", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DetectedLanguage language = result.getPrimaryLanguage();
        System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore());
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#detectLanguageBatchWithResponse}.
 */
public void detectBatchLanguagesMaxOverload() {
    // Each input carries an explicit ID and a per-document country hint.
    List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<DetectLanguageResultCollection> response =
        textAnalyticsClient.detectLanguageBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    DetectLanguageResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
            + " valid document count = %s.%n",
        stats.getDocumentCount(), stats.getInvalidDocumentCount(),
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DetectedLanguage language = result.getPrimaryLanguage();
        System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore());
    });
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String)}.
 */
public void recognizeEntities() {
    // Recognize categorized entities in a single document and print each one.
    textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft")
        .forEach(entity -> System.out.printf(
            "Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntities(String, String)}.
 */
public void recognizeEntitiesWithLanguage() {
    // Same as above, with an explicit "en" language hint.
    textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en")
        .forEach(entity -> System.out.printf(
            "Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesBatch} with a list of strings.
 */
public void recognizeEntitiesStringListWithOptions() {
    // Batch entity recognition over plain strings; null uses default request options.
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.",
        "I work at Microsoft.");
    RecognizeEntitiesResultCollection results =
        textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
    TextDocumentBatchStatistics stats = results.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    results.forEach(result -> result.getEntities().forEach(entity ->
        System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizeEntitiesBatchWithResponse}.
 */
public void recognizeBatchEntitiesMaxOverload() {
    // Inputs carry an explicit ID and a per-document language.
    List<TextDocumentInput> inputs = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<RecognizeEntitiesResultCollection> response =
        textAnalyticsClient.recognizeEntitiesBatchWithResponse(inputs, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    RecognizeEntitiesResultCollection collection = response.getValue();
    TextDocumentBatchStatistics stats = collection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        stats.getTransactionCount(), stats.getValidDocumentCount());
    collection.forEach(result -> result.getEntities().forEach(entity ->
        System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String)}.
 */
public void recognizePiiEntities() {
    // Recognize Personally Identifiable Information in a single document.
    textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987").forEach(entity ->
        System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsClient#recognizePiiEntities(String, String)}.
 */
public void recognizePiiEntitiesWithLanguage() {
textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987", "en")
.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore()));
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizePiiEntitiesBatch}: recognizes PII
 * entities for a batch of strings and prints batch statistics plus every entity found.
 */
public void recognizePiiEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "My SSN is 859-98-0987",
        "Visa card 4111 1111 1111 1111"
    );
    // Request statistics so the collection carries batch-level counts.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    RecognizePiiEntitiesResultCollection resultCollection =
        textAnalyticsClient.recognizePiiEntitiesBatch(documents, "en", options);
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(documentResult ->
        documentResult.getEntities().forEach(piiEntity -> System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubcategory(),
            piiEntity.getConfidenceScore())));
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizePiiEntitiesBatchWithResponse}: recognizes
 * PII entities for a batch of {@code TextDocumentInput} and prints the HTTP status code, batch
 * statistics, and every recognized entity.
 */
public void recognizeBatchPiiEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 859-98-0987"),
        new TextDocumentInput("1", "Visa card 4111 1111 1111 1111")
    );
    Response<RecognizePiiEntitiesResultCollection> response =
        textAnalyticsClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs,
            new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
    // Print the HTTP status code, for consistency with the other *BatchWithResponse snippets
    // (this was the only one that dropped it).
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    RecognizePiiEntitiesResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(recognizePiiEntitiesResult ->
        recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizeLinkedEntities(String)}: recognizes
 * linked entities in a single document and prints each entity with its text matches.
 */
public void recognizeLinkedEntities() {
    System.out.println("Linked Entities:");
    textAnalyticsClient.recognizeLinkedEntities("Old Faithful is a geyser at Yellowstone Park.")
        .forEach(entity -> {
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                entity.getDataSource());
            // A linked entity may match several spans of the input text.
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        });
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizeLinkedEntities(String, String)}:
 * recognizes linked entities in a document whose language is supplied explicitly.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    // Pass "en" explicitly instead of relying on the service default language.
    textAnalyticsClient.recognizeLinkedEntities(document, "en").forEach(entity -> {
        System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
            entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
            entity.getDataSource());
        entity.getMatches().forEach(match -> System.out.printf(
            "Matched entity: %s, confidence score: %f.%n",
            match.getText(), match.getConfidenceScore()));
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizeLinkedEntitiesBatch}: recognizes linked
 * entities for a batch of strings and prints batch statistics plus each entity and its matches.
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // null request options: use the service defaults.
    RecognizeLinkedEntitiesResultCollection resultCollection =
        textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(documentResult ->
        documentResult.getEntities().forEach(entity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@code TextAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse}:
 * recognizes linked entities for a batch of {@code TextDocumentInput} and prints the HTTP
 * status code, batch statistics, and each entity with its matches.
 */
public void recognizeLinkedEntitiesBatchMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
    );
    // Request batch statistics along with the recognition results.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<RecognizeLinkedEntitiesResultCollection> response =
        textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(documents, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(documentResult ->
        documentResult.getEntities().forEach(entity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %.2f.%n",
                match.getText(), match.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@code TextAnalyticsClient.extractKeyPhrases(String)}: extracts and prints
 * the key phrases of a single document.
 */
public void extractKeyPhrases() {
    System.out.println("Extracted phrases:");
    textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")
        .forEach(phrase -> System.out.printf("%s.%n", phrase));
}
/**
 * Code snippet for {@code TextAnalyticsClient.extractKeyPhrases(String, String)}: extracts and
 * prints the key phrases of a document whose language is supplied explicitly.
 */
public void extractKeyPhrasesWithLanguage() {
    System.out.println("Extracted phrases:");
    // Fixed: the lambda parameter was previously misspelled "kegPhrase".
    textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")
        .forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@code TextAnalyticsClient.extractKeyPhrasesBatch}: extracts key phrases for
 * a batch of strings and prints batch statistics plus each document's phrases.
 */
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "My cat might need to see a veterinarian.",
        "The pitot tube is used to measure airspeed."
    );
    // null request options: use the service defaults.
    ExtractKeyPhrasesResultCollection resultCollection =
        textAnalyticsClient.extractKeyPhrasesBatch(documents, "en", null);
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(documentResult -> {
        System.out.printf("Document ID: %s%n", documentResult.getId());
        System.out.println("Extracted phrases:");
        documentResult.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.extractKeyPhrasesBatchWithResponse}: extracts key
 * phrases for a batch of {@code TextDocumentInput} and prints the HTTP status code, batch
 * statistics, and each document's phrases.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
        new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
    );
    // Request batch statistics along with the extraction results.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<ExtractKeyPhrasesResultCollection> response =
        textAnalyticsClient.extractKeyPhrasesBatchWithResponse(documents, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf(
        "A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(documentResult -> {
        System.out.printf("Document ID: %s%n", documentResult.getId());
        System.out.println("Extracted phrases:");
        documentResult.getKeyPhrases().forEach(phrase ->
            System.out.printf("%s.%n", phrase));
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.analyzeSentiment(String)}: analyzes the sentiment
 * of a single document and prints the document- and sentence-level scores.
 */
public void analyzeSentiment() {
    DocumentSentiment documentSentiment =
        textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
    System.out.printf(
        "Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        documentSentiment.getSentiment(),
        documentSentiment.getConfidenceScores().getPositive(),
        documentSentiment.getConfidenceScores().getNeutral(),
        documentSentiment.getConfidenceScores().getNegative());
    // Sentiment is also scored for every individual sentence.
    documentSentiment.getSentences().forEach(sentence ->
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
}
/**
 * Code snippet for {@code TextAnalyticsClient.analyzeSentiment(String, String)}: analyzes the
 * sentiment of a document whose language is supplied explicitly.
 */
public void analyzeSentimentWithLanguage() {
    DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
        "The hotel was dark and unclean.", "en");
    System.out.printf(
        "Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
        documentSentiment.getSentiment(),
        documentSentiment.getConfidenceScores().getPositive(),
        documentSentiment.getConfidenceScores().getNeutral(),
        documentSentiment.getConfidenceScores().getNegative());
    // Sentiment is also scored for every individual sentence.
    documentSentiment.getSentences().forEach(sentence ->
        System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
}
/**
 * Code snippet for {@link TextAnalyticsClient
 */
public void analyzeSentimentStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    // Request batch statistics along with the sentiment results.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    AnalyzeSentimentResultCollection resultCollection =
        textAnalyticsClient.analyzeSentimentBatch(documents, "en", options);
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DocumentSentiment documentSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            documentSentiment.getSentiment(),
            documentSentiment.getConfidenceScores().getPositive(),
            documentSentiment.getConfidenceScores().getNeutral(),
            documentSentiment.getConfidenceScores().getNegative());
        // Sentence-level scores follow the document-level summary.
        documentSentiment.getSentences().forEach(sentence -> System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                + " negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.analyzeSentimentBatch} with opinion mining
 * enabled: prints, per sentence, each mined aspect and the opinions attached to it.
 */
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean. The restaurant had amazing gnocchi.",
        "The restaurant had amazing gnocchi. The hotel was dark and unclean."
    );
    // Opinion mining must be opted into explicitly.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
    AnalyzeSentimentResultCollection resultCollection =
        textAnalyticsClient.analyzeSentimentBatch(documents, "en", options);
    resultCollection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        result.getDocumentSentiment().getSentences().forEach(sentence -> {
            System.out.printf("\tSentence sentiment: %s%n", sentence.getSentiment());
            sentence.getMinedOpinions().forEach(minedOpinion -> {
                AspectSentiment aspectSentiment = minedOpinion.getAspect();
                System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
                    aspectSentiment.getText());
                for (OpinionSentiment opinionSentiment : minedOpinion.getOpinions()) {
                    System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
                        opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                }
            });
        });
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.analyzeSentimentBatchWithResponse}: analyzes the
 * sentiment of a batch of {@code TextDocumentInput} and prints the HTTP status code, batch
 * statistics, and the document- and sentence-level scores.
 */
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
            .setLanguage("en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
            .setLanguage("en")
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    Response<AnalyzeSentimentResultCollection> response =
        textAnalyticsClient.analyzeSentimentBatchWithResponse(documents, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    AnalyzeSentimentResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        DocumentSentiment documentSentiment = result.getDocumentSentiment();
        System.out.printf(
            "Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                + "negative score: %.2f.%n",
            documentSentiment.getSentiment(),
            documentSentiment.getConfidenceScores().getPositive(),
            documentSentiment.getConfidenceScores().getNeutral(),
            documentSentiment.getConfidenceScores().getNegative());
        documentSentiment.getSentences().forEach(sentence ->
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
                    + " negative score: %.2f.%n",
                sentence.getSentiment(),
                sentence.getConfidenceScores().getPositive(),
                sentence.getConfidenceScores().getNeutral(),
                sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@code TextAnalyticsClient.analyzeSentimentBatchWithResponse} with opinion
 * mining and statistics enabled: prints status code, batch statistics, and per-sentence mined
 * aspects with their opinions.
 */
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
            .setLanguage("en"),
        new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
            .setLanguage("en")
    );
    // Opt into both opinion mining and batch statistics.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
        .setIncludeStatistics(true);
    Response<AnalyzeSentimentResultCollection> response =
        textAnalyticsClient.analyzeSentimentBatchWithResponse(documents, options, Context.NONE);
    System.out.printf("Status code of request response: %d%n", response.getStatusCode());
    AnalyzeSentimentResultCollection resultCollection = response.getValue();
    TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
    System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
        batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
    resultCollection.forEach(result -> {
        System.out.printf("Document ID: %s%n", result.getId());
        result.getDocumentSentiment().getSentences().forEach(sentence -> {
            System.out.printf("\tSentence sentiment: %s%n", sentence.getSentiment());
            sentence.getMinedOpinions().forEach(minedOpinion -> {
                AspectSentiment aspectSentiment = minedOpinion.getAspect();
                System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
                    aspectSentiment.getText());
                for (OpinionSentiment opinionSentiment : minedOpinion.getOpinions()) {
                    System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
                        opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                }
            });
        });
    });
}
} |
Are we only creating the opinion lists to print out their sizes? That doesn't seem to add much value — this example could be simplified, I think. | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size());
for (MinedOpinion positiveMinedOpinion : positiveMinedOpinions) {
System.out.printf("\tAspect: %s%n", positiveMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : positiveMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Mixed aspects count: %d%n", mixedMinedOpinions.size());
for (MinedOpinion mixedMinedOpinion : mixedMinedOpinions) {
System.out.printf("\tAspect: %s%n", mixedMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : mixedMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
} | System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size()); | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} |
I don't think it's a good idea to be making these lists — can't we directly add the content from L508–L513 here? Applicable to all examples below. | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
} | mixedMinedOpinions.add(minedOpinion); | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
 * Code snippet for creating a {@link TextAnalyticsAsyncClient}.
 *
 * @return The TextAnalyticsAsyncClient object
 */
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
    // Return the built client directly; the previous local variable shadowed
    // the class field of the same name and added nothing.
    return new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildAsyncClient();
}
/**
 * Code snippet for updating the existing API key.
 */
public void rotateAzureKeyCredential() {
    AzureKeyCredential credential = new AzureKeyCredential("{key}");
    TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder()
        .credential(credential)
        .endpoint("{endpoint}")
        .buildAsyncClient();
    // Updating the shared credential instance rotates the key used by the client above.
    credential.update("{new_api_key}");
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.detectLanguage(String)}: detects the
 * language of a single document and prints the result asynchronously.
 */
public void detectLanguage() {
    textAnalyticsAsyncClient.detectLanguage("Bonjour tout le monde").subscribe(language ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.detectLanguage(String, String)}: detects the
 * language of a document with a country hint.
 */
public void detectLanguageWithCountryHint() {
    String document = "This text is in English";
    // The country hint helps the service disambiguate the detected language.
    textAnalyticsAsyncClient.detectLanguage(document, "US").subscribe(language ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.detectLanguageBatch}: detects languages for
 * a batch of strings and prints batch statistics plus each primary language.
 */
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // null request options: use the service defaults.
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
        resultCollection -> {
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (DetectLanguageResult result : resultCollection) {
                DetectedLanguage detectedLanguage = result.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.detectLanguageBatchWithResponse}: detects
 * languages for a batch of {@code DetectLanguageInput} and prints status code, batch
 * statistics, and each primary language.
 */
public void detectBatchLanguagesMaxOverload() {
    List<DetectLanguageInput> documents = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
    );
    // Request batch statistics along with the detection results.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.detectLanguageBatchWithResponse(documents, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            DetectLanguageResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (DetectLanguageResult result : resultCollection) {
                DetectedLanguage detectedLanguage = result.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.recognizeEntities(String)}: recognizes
 * categorized entities in a single document and prints them asynchronously.
 */
public void recognizeEntities() {
    textAnalyticsAsyncClient.recognizeEntities("Satya Nadella is the CEO of Microsoft")
        .subscribe(entities -> entities.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.recognizeEntities(String, String)}:
 * recognizes categorized entities in a document whose language is supplied explicitly.
 */
public void recognizeEntitiesWithLanguage() {
    String document = "Satya Nadella is the CEO of Microsoft";
    // "en" pins the document language instead of relying on the service default.
    textAnalyticsAsyncClient.recognizeEntities(document, "en")
        .subscribe(entities -> entities.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.recognizeEntitiesBatch}: recognizes entities
 * for a batch of strings and prints batch statistics plus every entity found.
 */
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // null request options: use the service defaults.
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
        .subscribe(resultCollection -> {
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@code TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse}:
 * recognizes entities for a batch of {@code TextDocumentInput} and prints status code, batch
 * statistics, and every entity found.
 */
public void recognizeBatchEntitiesMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch statistics along with the recognition results.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(documents, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String,
     * TextAnalyticsRequestOptions)}
     */
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
    public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
        List<String> documents = Arrays.asList(
            "The hotel was dark and unclean.",
            "The restaurant had amazing gnocchi."
        );
        // Opinion mining is opt-in; it attaches aspect/opinion pairs to each sentence.
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
        textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
            response -> {
                TextDocumentBatchStatistics batchStatistics = response.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Bucket mined opinions by the sentiment of their aspect.
                // NOTE(review): only the negative bucket is printed below; the positive and
                // mixed buckets are collected but unused in this snippet.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                response.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                // Report every negative aspect together with the opinions that caused it.
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
    public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
            new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
        // Request both opinion mining and batch statistics on the same call.
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
            .setIncludeOpinionMining(true)
            .setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
        textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                AnalyzeSentimentResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(),
                    batchStatistics.getValidDocumentCount());
                // Bucket mined opinions by the sentiment of their aspect.
                // NOTE(review): only the negative bucket is printed below; the positive and
                // mixed buckets are collected but unused in this snippet.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                resultCollection.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                // Report every negative aspect together with the opinions that caused it.
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String, String)}:
 * key-phrase extraction with an explicit language hint.
 */
public void extractKeyPhrasesWithLanguage() {
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr").subscribe(
        phrase -> System.out.printf("%s.%n", phrase));
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch(Iterable, String, TextAnalyticsRequestOptions)}:
 * batch key-phrase extraction from plain strings.
 */
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    // A null options argument uses the service defaults.
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics stats = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            batchResult.forEach(result -> {
                System.out.println("Extracted phrases:");
                result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
            });
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}:
 * batch key-phrase extraction returning the raw HTTP response, statistics enabled.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(documents, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection results = response.getValue();
            TextDocumentBatchStatistics stats = results.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            results.forEach(result -> {
                System.out.println("Extracted phrases:");
                result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
            });
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String)}:
 * sentiment analysis for a single document, printing document- and sentence-level results.
 */
public void analyzeSentiment() {
    textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
            documentSentiment.getSentences().forEach(sentence ->
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                        + "negative score: %.2f.%n",
                    sentence.getSentiment(),
                    sentence.getConfidenceScores().getPositive(),
                    sentence.getConfidenceScores().getNeutral(),
                    sentence.getConfidenceScores().getNegative()));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String)}:
 * sentiment analysis with an explicit language hint.
 */
public void analyzeSentimentWithLanguage() {
    textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
            documentSentiment.getSentences().forEach(sentence ->
                System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                        + "negative score: %.2f.%n",
                    sentence.getSentiment(),
                    sentence.getConfidenceScores().getPositive(),
                    sentence.getConfidenceScores().getNeutral(),
                    sentence.getConfidenceScores().getNegative()));
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}:
 * batch sentiment analysis from plain strings with statistics enabled.
 */
public void analyzeSentimentStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics stats = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            batchResult.forEach(result -> {
                System.out.printf("Document ID: %s%n", result.getId());
                DocumentSentiment documentSentiment = result.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentence ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                            + "neutral score: %.2f, negative score: %.2f.%n",
                        sentence.getSentiment(),
                        sentence.getConfidenceScores().getPositive(),
                        sentence.getConfidenceScores().getNeutral(),
                        sentence.getConfidenceScores().getNegative()));
            });
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}
 * with opinion mining enabled.
 */
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
        new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics stats = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            batchResult.forEach(result -> {
                System.out.printf("Document ID: %s%n", result.getId());
                // With opinion mining enabled, each sentence carries mined aspect/opinion pairs.
                result.getDocumentSentiment().getSentences().forEach(sentence -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentence.getSentiment());
                    sentence.getMinedOpinions().forEach(minedOpinion -> {
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            minedOpinion.getAspect().getSentiment(), minedOpinion.getAspect().getText());
                        minedOpinion.getOpinions().forEach(opinion -> System.out.printf(
                            "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                            opinion.getSentiment(), opinion.getText(), opinion.isNegated()));
                    });
                });
            });
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}:
 * batch sentiment analysis returning the raw HTTP response, statistics enabled.
 */
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(documents, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection results = response.getValue();
            TextDocumentBatchStatistics stats = results.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(),
                stats.getValidDocumentCount());
            results.forEach(result -> {
                System.out.printf("Document ID: %s%n", result.getId());
                DocumentSentiment documentSentiment = result.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentence ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                            + "neutral score: %.2f, negative score: %.2f.%n",
                        sentence.getSentiment(),
                        sentence.getConfidenceScores().getPositive(),
                        sentence.getConfidenceScores().getNeutral(),
                        sentence.getConfidenceScores().getNegative()));
            });
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, AnalyzeSentimentOptions)}
 * with opinion mining and statistics enabled.
 */
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> documents = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
        .setIncludeOpinionMining(true).setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(documents, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection results = response.getValue();
            TextDocumentBatchStatistics stats = results.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(),
                stats.getValidDocumentCount());
            results.forEach(result -> {
                System.out.printf("Document ID: %s%n", result.getId());
                // With opinion mining enabled, each sentence carries mined aspect/opinion pairs.
                result.getDocumentSentiment().getSentences().forEach(sentence -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentence.getSentiment());
                    sentence.getMinedOpinions().forEach(minedOpinion -> {
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            minedOpinion.getAspect().getSentiment(), minedOpinion.getAspect().getText());
                        minedOpinion.getOpinions().forEach(opinion -> System.out.printf(
                            "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                            opinion.getSentiment(), opinion.getText(), opinion.isNegated()));
                    });
                });
            });
        });
}
} |
unused? | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
} | List<MinedOpinion> mixedMinedOpinions = new ArrayList<>(); | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
});
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true)
.setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
});
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
    // Build an asynchronous client from an API key credential and a service endpoint.
    return new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildAsyncClient();
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
    // Detect the dominant language of a single document and print the result.
    textAnalyticsAsyncClient.detectLanguage("Bonjour tout le monde").subscribe(language -> {
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore());
    });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // Pass explicit request options instead of null: the snippet prints batch
    // statistics below, which the service only returns when includeStatistics
    // is set on the request.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", requestOptions).subscribe(
        batchResult -> {
            // Per-batch statistics: transaction and valid-document counts.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Print the primary detected language of each input document.
            for (DetectLanguageResult detectLanguageResult : batchResult) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
    // Recognize categorized entities in a single document and print each one.
    String document = "Satya Nadella is the CEO of Microsoft";
    textAnalyticsAsyncClient.recognizeEntities(document)
        .subscribe(entities -> entities.forEach(categorizedEntity -> System.out.printf(
            "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
            categorizedEntity.getText(),
            categorizedEntity.getCategory(),
            categorizedEntity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // Pass explicit request options instead of null: the snippet prints batch
    // statistics, which are only returned when includeStatistics is set.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", requestOptions)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Print every recognized entity of every document.
            batchResult.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
    // Recognize Personally Identifiable Information in one document and print each match.
    textAnalyticsAsyncClient.recognizePiiEntities("My SSN is 859-98-0987").subscribe(piiEntities -> {
        piiEntities.forEach(piiEntity -> System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubcategory(),
            piiEntity.getConfidenceScore()));
    });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchPiiEntitiesMaxOverload() {
    // Two PII-bearing documents; the IDs "0" and "1" are echoed back in the results.
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 859-98-0987."),
        new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
    // Request per-batch statistics so getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // Unwrap the REST response to get the result collection.
            RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
            TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Print every recognized PII entity for every document.
            piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
                recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                        + " entity subcategory: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // Pass explicit request options instead of null: the snippet prints batch
    // statistics, which are only returned when includeStatistics is set.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", requestOptions)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // For each document, print every linked entity and its matches.
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
    // Print the header first, then each extracted key phrase as it is emitted.
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde")
        .subscribe(phrase -> System.out.printf("%s.%n", phrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    // Pass explicit request options instead of null: the snippet prints batch
    // statistics, which are only returned when includeStatistics is set.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", requestOptions).subscribe(
        extractKeyPhraseResults -> {
            TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Print the key phrases extracted from each document.
            extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                System.out.println("Extracted phrases:");
                extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
            });
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
    // Analyze the overall and per-sentence sentiment of a single document.
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
        System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
        documentSentiment.getSentences().forEach(sentenceSentiment ->
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                    + "negative score: %.2f.%n",
                sentenceSentiment.getSentiment(),
                sentenceSentiment.getConfidenceScores().getPositive(),
                sentenceSentiment.getConfidenceScores().getNeutral(),
                sentenceSentiment.getConfidenceScores().getNegative()));
    });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
Not a good example to show. Please use the options more efficiently or use other overload. Applicable for all examples below. | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe( | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
 * Code snippet for creating a {@link TextAnalyticsAsyncClient}
 *
 * @return The TextAnalyticsAsyncClient object
 */
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
    // "{key}" and "{endpoint}" are placeholders the reader substitutes with real values.
    TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildAsyncClient();
    return textAnalyticsAsyncClient;
}
/**
 * Code snippet for updating the existing API key.
 */
public void rotateAzureKeyCredential() {
    AzureKeyCredential credential = new AzureKeyCredential("{key}");
    TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
        .credential(credential)
        .endpoint("{endpoint}")
        .buildAsyncClient();
    // Rotating the key on the shared credential updates the already-built client in place.
    credential.update("{new_api_key}");
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String)}.
 */
public void detectLanguage() {
    String document = "Bonjour tout le monde";
    textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String, String)}.
 */
public void detectLanguageWithCountryHint() {
    String document = "This text is in English";
    // Two-letter ISO country code used as a hint to disambiguate the language.
    String countryHint = "US";
    textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left batchResult.getStatistics() null, so the printf below threw a
    // NullPointerException — and contradicted this snippet's "WithOptions" name.
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
            batchResult -> {
                // Batch-level counters (transactions, valid documents).
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                for (DetectLanguageResult detectLanguageResult : batchResult) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void detectBatchLanguagesMaxOverload() {
    // Batch input with explicit per-document ids and country hints.
    List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
    );
    // Request batch statistics so resultCollection.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            DetectLanguageResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (DetectLanguageResult detectLanguageResult : resultCollection) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String)}.
 */
public void recognizeEntities() {
    String document = "Satya Nadella is the CEO of Microsoft";
    textAnalyticsAsyncClient.recognizeEntities(document)
        .subscribe(entityCollection -> entityCollection.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String, String)}.
 */
public void recognizeEntitiesWithLanguage() {
    String document = "Satya Nadella is the CEO of Microsoft";
    textAnalyticsAsyncClient.recognizeEntities(document, "en")
        .subscribe(entityCollection -> entityCollection.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left batchResult.getStatistics() null, so the printf below threw a
    // NullPointerException.
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true))
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchEntitiesMaxOverload() {
    // Batch input with explicit per-document ids and language tags.
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch statistics so resultCollection.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String)}.
 */
public void recognizeLinkedEntities() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            // Each linked entity may be matched by several text spans in the document.
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String, String)}.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left batchResult.getStatistics() null, so the printf below threw a
    // NullPointerException.
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true))
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchLinkedEntitiesMaxOverload() {
    // Batch input with explicit per-document ids and language tags.
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
    // Request batch statistics so resultCollection.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %.2f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String)}.
 */
public void extractKeyPhrases() {
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
        System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String, String)}.
 */
public void extractKeyPhrasesWithLanguage() {
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
        .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left extractKeyPhraseResults.getStatistics() null, so the printf below
    // threw a NullPointerException.
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
            extractKeyPhraseResults -> {
                TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                    System.out.println("Extracted phrases:");
                    extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
                });
            });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    // Batch input with explicit per-document ids and language tags.
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch statistics so resultCollection.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
                System.out.println("Extracted phrases:");
                for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
                    System.out.printf("%s.%n", keyPhrase);
                }
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String)}.
 */
public void analyzeSentiment() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
        System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
        // Per-sentence sentiment label with its positive/neutral/negative confidence scores.
        for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                    + "negative score: %.2f.%n",
                sentenceSentiment.getSentiment(),
                sentenceSentiment.getConfidenceScores().getPositive(),
                sentenceSentiment.getConfidenceScores().getNeutral(),
                sentenceSentiment.getConfidenceScores().getNegative());
        }
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String)}.
 */
public void analyzeSentimentWithLanguage() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document, "en")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                    + "negative score: %.2f.%n",
                    sentenceSentiment.getSentiment(),
                    sentenceSentiment.getConfidenceScores().getPositive(),
                    sentenceSentiment.getConfidenceScores().getNeutral(),
                    sentenceSentiment.getConfidenceScores().getNegative());
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String, AnalyzeSentimentOptions)}.
 */
public void analyzeSentimentWithLanguageWithOpinionMining() {
    // Opinion mining adds aspect-level sentiment ("mined opinions") to each sentence.
    textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
        new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
        .subscribe(documentSentiment -> {
            // Bucket mined opinions by the sentiment of their aspect.
            List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
            List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
            List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                    TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                    if (NEGATIVE.equals(aspectTextSentiment)) {
                        negativeMinedOpinions.add(minedOpinion);
                    } else if (POSITIVE.equals(aspectTextSentiment)) {
                        positiveMinedOpinions.add(minedOpinion);
                    } else if (MIXED.equals(aspectTextSentiment)) {
                        mixedMinedOpinions.add(minedOpinion);
                    }
                });
            }
            System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
            for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                    System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                        opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                }
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}
 */
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    // Opinion mining surfaces aspect-level sentiment. Statistics must also be requested
    // explicitly: without setIncludeStatistics(true) the service omits them and
    // response.getStatistics() below is null (NullPointerException). Same pattern as
    // the analyzeBatchSentimentMaxOverloadWithOpinionMining snippet.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
        .setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
        response -> {
            TextDocumentBatchStatistics batchStatistics = response.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Bucket mined opinions by the sentiment of their aspect, across all documents.
            List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
            List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
            List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
            response.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                        TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                        if (NEGATIVE.equals(aspectTextSentiment)) {
                            negativeMinedOpinions.add(minedOpinion);
                        } else if (POSITIVE.equals(aspectTextSentiment)) {
                            positiveMinedOpinions.add(minedOpinion);
                        } else if (MIXED.equals(aspectTextSentiment)) {
                            mixedMinedOpinions.add(minedOpinion);
                        }
                    });
                });
            });
            System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
            for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                    System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                        opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                }
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void analyzeBatchSentimentMaxOverload() {
    // Batch input with explicit per-document ids and language tags.
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // Request batch statistics so resultCollection.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            // Document-level sentiment, then each sentence's label and confidence scores.
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                        + "neutral score: %.2f, negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative()));
            });
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, AnalyzeSentimentOptions)}.
 */
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // Enable opinion mining and request batch statistics in one options object.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
        .setIncludeOpinionMining(true)
        .setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            // Bucket mined opinions by the sentiment of their aspect, across all documents.
            List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
            List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
            List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                        TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                        if (NEGATIVE.equals(aspectTextSentiment)) {
                            negativeMinedOpinions.add(minedOpinion);
                        } else if (POSITIVE.equals(aspectTextSentiment)) {
                            positiveMinedOpinions.add(minedOpinion);
                        } else if (MIXED.equals(aspectTextSentiment)) {
                            mixedMinedOpinions.add(minedOpinion);
                        }
                    });
                });
            });
            System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
            for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                    System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                        opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                }
            }
        });
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left batchResult.getStatistics() null, so the printf below threw a
    // NullPointerException — and contradicted this snippet's "WithOptions" name.
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
            batchResult -> {
                // Batch-level counters (transactions, valid documents).
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                for (DetectLanguageResult detectLanguageResult : batchResult) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}
 */
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // Request batch statistics explicitly: the service only returns them when
    // setIncludeStatistics(true) is set (see the *MaxOverload snippets). Passing null
    // here left batchResult.getStatistics() null, so the printf below threw a
    // NullPointerException.
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true))
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String)}.
 */
public void recognizePiiEntities() {
    String document = "My SSN is 859-98-0987";
    textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
        piiEntityCollection.forEach(entity -> System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String, String)}.
 */
public void recognizePiiEntitiesWithLanguage() {
    String document = "My SSN is 859-98-0987";
    textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
        .subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
            "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                + " entity subcategory: %s, confidence score: %f.%n",
            entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void recognizePiiEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "My SSN is 859-98-0987.",
        "Visa card 0111 1111 1111 1111."
    );
    // Request batch statistics (so getStatistics() is populated) and the latest model version.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
        .setModelVersion("latest");
    textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
        .subscribe(piiEntitiesResults -> {
            TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
                recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                        + " entity subcategory: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatchWithResponse(Iterable,
 * TextAnalyticsRequestOptions)}
 */
public void recognizeBatchPiiEntitiesMaxOverload() {
    // Batch input with explicit per-document ids (language defaults when not set).
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 859-98-0987."),
        new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
    // Request batch statistics so piiEntitiesResults.getStatistics() below is populated.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
            TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
                recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                        + " entity subcategory: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities}: link entities in a
 * single document to entries in a well-known data source.
 */
public void recognizeLinkedEntities() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            // Each linked entity may be matched at several places in the document.
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities}: same as above but with
 * an explicit language hint.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch}: batch linked-entity
 * recognition over plain strings; a null options argument accepts the service defaults.
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse}: batch
 * linked-entity recognition over {@link TextDocumentInput} documents with the full HTTP response.
 */
public void recognizeBatchLinkedEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The WithResponse overload also exposes HTTP-level details such as the status code.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %.2f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases}: extract key phrases from a
 * single document, letting the service infer the language.
 */
public void extractKeyPhrases() {
    // Header is printed eagerly; the phrases themselves arrive asynchronously on subscribe.
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
        System.out.printf("%s.%n", keyPhrase));
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases}: same as above but with an
 * explicit "fr" language hint.
 */
public void extractKeyPhrasesWithLanguage() {
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
        .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch}: batch key-phrase
 * extraction over plain strings; a null options argument accepts the service defaults.
 */
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
        extractKeyPhraseResults -> {
            TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                System.out.println("Extracted phrases:");
                extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
            });
        });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse}: batch
 * key-phrase extraction over {@link TextDocumentInput} documents with the full HTTP response.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
                System.out.println("Extracted phrases:");
                for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
                    System.out.printf("%s.%n", keyPhrase);
                }
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment}: analyze sentiment of a single
 * document, printing the document-level label and per-sentence confidence scores.
 */
public void analyzeSentiment() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
        System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
        for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                    + "negative score: %.2f.%n",
                sentenceSentiment.getSentiment(),
                sentenceSentiment.getConfidenceScores().getPositive(),
                sentenceSentiment.getConfidenceScores().getNeutral(),
                sentenceSentiment.getConfidenceScores().getNegative());
        }
    });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment}: same analysis with an explicit
 * language hint.
 */
public void analyzeSentimentWithLanguage() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document, "en")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                    + "negative score: %.2f.%n",
                    sentenceSentiment.getSentiment(),
                    sentenceSentiment.getConfidenceScores().getPositive(),
                    sentenceSentiment.getConfidenceScores().getNeutral(),
                    sentenceSentiment.getConfidenceScores().getNegative());
            }
        });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment}: sentiment analysis with opinion
 * mining enabled, walking aspects and their opinions per sentence.
 */
public void analyzeSentimentWithLanguageWithOpinionMining() {
    // Opinion mining is off by default and must be enabled through AnalyzeSentimentOptions.
    textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
        new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
        .subscribe(documentSentiment -> {
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
                sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
                    AspectSentiment aspectSentiment = minedOpinions.getAspect();
                    System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
                        aspectSentiment.getSentiment(), aspectSentiment.getText());
                    for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(),
                            opinionSentiment.isNegated());
                    }
                });
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch}: batch sentiment analysis
 * over plain strings with opinion mining enabled.
 */
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
        response -> {
            TextDocumentBatchStatistics batchStatistics = response.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            response.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
                    // Mined opinions are only present because setIncludeOpinionMining(true) was set.
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
                        AspectSentiment aspectSentiment = minedOpinions.getAspect();
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            aspectSentiment.getSentiment(), aspectSentiment.getText());
                        for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
                            System.out.printf(
                                "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                                opinionSentiment.getSentiment(), opinionSentiment.getText(),
                                opinionSentiment.isNegated());
                        }
                    });
                });
            });
        });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse}: batch
 * sentiment analysis over {@link TextDocumentInput} documents with the full HTTP response.
 */
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                        + "neutral score: %.2f, negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative()));
            });
        });
}

/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse}: same overload
 * as above, additionally enabling opinion mining together with statistics.
 */
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // AnalyzeSentimentOptions extends the plain request options with opinion-mining control.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
        .setIncludeOpinionMining(true).setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
                        AspectSentiment aspectSentiment = minedOpinions.getAspect();
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            aspectSentiment.getSentiment(), aspectSentiment.getText());
                        for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
                            System.out.printf(
                                "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                                opinionSentiment.getSentiment(), opinionSentiment.getText(),
                                opinionSentiment.isNegated());
                        }
                    });
                });
            });
        });
}
} |
Consider providing an empty object | public void analyzeSentimentBatchNullInput() {
StepVerifier.create(client.analyzeSentimentBatch(null, null, (TextAnalyticsRequestOptions) null))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
} | StepVerifier.create(client.analyzeSentimentBatch(null, null, (TextAnalyticsRequestOptions) null)) | public void analyzeSentimentBatchNullInput() {
StepVerifier.create(client.analyzeSentimentBatch(null, null, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
} | class DocumentInputAsyncTest {
// Shared async client under test, configured once for the whole class.
static TextAnalyticsAsyncClient client;

// Builds a client pointing at a local HTTPS endpoint with a fake key — presumably these tests only
// exercise client-side input validation and never reach the network; TODO confirm.
@BeforeAll
protected static void beforeTest() {
    client = new TextAnalyticsClientBuilder()
        .endpoint(VALID_HTTPS_LOCALHOST)
        .credential(new AzureKeyCredential("fakeKey"))
        .buildAsyncClient();
}

// Drops the shared client reference so it can be garbage-collected after the suite finishes.
@AfterAll
protected static void afterTest() {
    client = null;
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#detectLanguage}.
 */
@Test
public void detectLanguageNullInput() {
    StepVerifier.create(client.detectLanguage(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#detectLanguage} with a country hint.
 */
@Test
public void detectLanguageNullInputWithCountryHint() {
    StepVerifier.create(client.detectLanguage(null, "US"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch}.
 */
@Test
public void detectLanguagesBatchNullInput() {
    StepVerifier.create(client.detectLanguageBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#detectLanguageBatch}.
 */
@Test
public void detectLanguagesBatchEmptyInputList() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint.
 */
@Test
public void detectLanguagesBatchNullInputWithCountryHint() {
    StepVerifier.create(client.detectLanguageBatch(null, "US", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithCountryHint() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), "US", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint and request options.
 */
@Test
public void detectLanguagesBatchNullInputWithCountryHintAndRequestOptions() {
    StepVerifier.create(client.detectLanguageBatch(null, "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint and request options.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithCountryHintAndRequestOptions() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse}.
 */
@Test
public void detectLanguagesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.detectLanguageBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse}.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.detectLanguageBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntities}.
 */
@Test
public void recognizeEntitiesNullInput() {
    StepVerifier.create(client.recognizeEntities(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntities} with a language hint.
 */
@Test
public void recognizeEntitiesNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeEntities(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch}.
 */
@Test
public void recognizeEntitiesBatchNullInput() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch}.
 */
@Test
public void recognizeEntitiesBatchEmptyInputList() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint.
 */
@Test
public void recognizeEntitiesBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeEntitiesBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse}.
 */
@Test
public void recognizeEntitiesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.recognizeEntitiesBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of
 * {@link TextDocumentInput} is given for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse}.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.recognizeEntitiesBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntities}.
 */
@Test
public void recognizeLinkedEntitiesNullInput() {
    StepVerifier.create(client.recognizeLinkedEntities(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntities} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntities(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch}.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInput() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch}.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputList() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint and
 * request options.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse}.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of
 * {@link TextDocumentInput} is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse}.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrases}.
 */
@Test
public void extractKeyPhrasesNullInput() {
    StepVerifier.create(client.extractKeyPhrases(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals reports expected vs. actual on failure, unlike assertTrue(a.equals(b)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrases} with a language hint.
 */
@Test
public void extractKeyPhrasesNullInputWithLanguageHint() {
    StepVerifier.create(client.extractKeyPhrases(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that a {@link NullPointerException} is thrown when a null batch of documents is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch}.
 */
@Test
public void extractKeyPhrasesBatchNullInput() {
    StepVerifier.create(client.extractKeyPhrasesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}

/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is
 * given for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch}.
 */
@Test
public void extractKeyPhrasesBatchEmptyInputList() {
    StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
* Verifies that a {@link NullPointerException} is thrown when null documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchNullInputWithLanguageHint() {
StepVerifier.create(client.extractKeyPhrasesBatch(null, "en", null))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchEmptyInputListWithLanguageHint() {
StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), "en", null))
.verifyErrorSatisfies(exception -> {
assertEquals(IllegalArgumentException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that a {@link NullPointerException} is thrown when null documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchNullInputWithLanguageHintAndRequestOptions() {
StepVerifier.create(client.extractKeyPhrasesBatch(null, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
.verifyErrorSatisfies(exception -> {
assertEquals(IllegalArgumentException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that a {@link NullPointerException} is thrown when null documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchNullInputWithMaxOverload() {
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(null,
new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
* given for {@link TextAnalyticsAsyncClient
*/
@Test
public void extractKeyPhrasesBatchEmptyInputListWithMaxOverload() {
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(Collections.emptyList(),
new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
.verifyErrorSatisfies(exception -> {
assertEquals(IllegalArgumentException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE.equals(exception.getMessage()));
});
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentiment}.
 */
@Test
public void analyzeSentimentNullInput() {
    StepVerifier.create(client.analyzeSentiment(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals(expected, actual) reports a readable diff on failure,
            // unlike assertTrue(expected.equals(actual)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentiment} with a language hint.
 */
@Test
public void analyzeSentimentNullInputWithLanguageHint() {
    StepVerifier.create(client.analyzeSentiment(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch}.
 */
@Test
public void analyzeSentimentBatchNullInput() {
    // Restored: the original source had a dangling @Test (two stacked @Test annotations
    // with no method between them), which does not compile because @Test is not repeatable.
    // The cast disambiguates the analyzeSentimentBatch overload, matching the sibling tests.
    StepVerifier.create(client.analyzeSentimentBatch(null, null, (TextAnalyticsRequestOptions) null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch}.
 */
@Test
public void analyzeSentimentBatchEmptyInputList() {
    StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), null, (TextAnalyticsRequestOptions) null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with a language hint.
 */
@Test
public void analyzeSentimentBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.analyzeSentimentBatch(null, "en", (TextAnalyticsRequestOptions) null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with a language hint.
 */
@Test
public void analyzeSentimentBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), "en", (TextAnalyticsRequestOptions) null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with a language hint and request options.
 */
@Test
public void analyzeSentimentBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.analyzeSentimentBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with a language hint and request options.
 */
@Test
public void analyzeSentimentBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse}.
 */
@Test
public void analyzeSentimentBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.analyzeSentimentBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
 * given for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse}.
 */
@Test
public void analyzeSentimentEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.analyzeSentimentBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
} | class DocumentInputAsyncTest {
// Shared client under test; created once per class and discarded afterwards.
static TextAnalyticsAsyncClient client;
// Builds the async client against a localhost endpoint with a fake key.
// NOTE(review): these tests appear to exercise only client-side input
// validation, so no request should ever reach a real service — confirm.
@BeforeAll
protected static void beforeTest() {
    client = new TextAnalyticsClientBuilder()
        .endpoint(VALID_HTTPS_LOCALHOST)
        .credential(new AzureKeyCredential("fakeKey"))
        .buildAsyncClient();
}
// Releases the shared client so it can be garbage-collected after the class runs.
@AfterAll
protected static void afterTest() {
    client = null;
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#detectLanguage}.
 */
@Test
public void detectLanguageNullInput() {
    StepVerifier.create(client.detectLanguage(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals(expected, actual) reports a readable diff on failure,
            // unlike assertTrue(expected.equals(actual)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#detectLanguage} with a country hint.
 */
@Test
public void detectLanguageNullInputWithCountryHint() {
    StepVerifier.create(client.detectLanguage(null, "US"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch}.
 */
@Test
public void detectLanguagesBatchNullInput() {
    StepVerifier.create(client.detectLanguageBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch}.
 */
@Test
public void detectLanguagesBatchEmptyInputList() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint.
 */
@Test
public void detectLanguagesBatchNullInputWithCountryHint() {
    StepVerifier.create(client.detectLanguageBatch(null, "US", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithCountryHint() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), "US", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint and request options.
 */
@Test
public void detectLanguagesBatchNullInputWithCountryHintAndRequestOptions() {
    StepVerifier.create(client.detectLanguageBatch(null, "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a country hint and request options.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithCountryHintAndRequestOptions() {
    StepVerifier.create(client.detectLanguageBatch(Collections.emptyList(), "US",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse}.
 */
@Test
public void detectLanguagesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.detectLanguageBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse}.
 */
@Test
public void detectLanguagesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.detectLanguageBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntities}.
 */
@Test
public void recognizeEntitiesNullInput() {
    StepVerifier.create(client.recognizeEntities(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals(expected, actual) reports a readable diff on failure,
            // unlike assertTrue(expected.equals(actual)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntities} with a language hint.
 */
@Test
public void recognizeEntitiesNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeEntities(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch}.
 */
@Test
public void recognizeEntitiesBatchNullInput() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch}.
 */
@Test
public void recognizeEntitiesBatchEmptyInputList() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint.
 */
@Test
public void recognizeEntitiesBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeEntitiesBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeEntitiesBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeEntitiesBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse}.
 */
@Test
public void recognizeEntitiesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.recognizeEntitiesBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
 * given for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse}.
 */
@Test
public void recognizeEntitiesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.recognizeEntitiesBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntities}.
 */
@Test
public void recognizeLinkedEntitiesNullInput() {
    StepVerifier.create(client.recognizeLinkedEntities(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals(expected, actual) reports a readable diff on failure,
            // unlike assertTrue(expected.equals(actual)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntities} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntities(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch}.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInput() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch}.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputList() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a language hint and request options.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse}.
 */
@Test
public void recognizeLinkedEntitiesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
 * given for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse}.
 */
@Test
public void recognizeLinkedEntitiesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrases}.
 */
@Test
public void extractKeyPhrasesNullInput() {
    StepVerifier.create(client.extractKeyPhrases(null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            // assertEquals(expected, actual) reports a readable diff on failure,
            // unlike assertTrue(expected.equals(actual)).
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when a null document is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrases} with a language hint.
 */
@Test
public void extractKeyPhrasesNullInputWithLanguageHint() {
    StepVerifier.create(client.extractKeyPhrases(null, "en"))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch}.
 */
@Test
public void extractKeyPhrasesBatchNullInput() {
    StepVerifier.create(client.extractKeyPhrasesBatch(null, null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch}.
 */
@Test
public void extractKeyPhrasesBatchEmptyInputList() {
    StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), null, null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch} with a language hint.
 */
@Test
public void extractKeyPhrasesBatchNullInputWithLanguageHint() {
    StepVerifier.create(client.extractKeyPhrasesBatch(null, "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch} with a language hint.
 */
@Test
public void extractKeyPhrasesBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), "en", null))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch} with a language hint and request options.
 */
@Test
public void extractKeyPhrasesBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.extractKeyPhrasesBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch} with a language hint and request options.
 */
@Test
public void extractKeyPhrasesBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.extractKeyPhrasesBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse}.
 */
@Test
public void extractKeyPhrasesBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
 * given for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse}.
 */
@Test
public void extractKeyPhrasesBatchEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
* Verifies that a {@link NullPointerException} is thrown when null document is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void analyzeSentimentNullInput() {
StepVerifier.create(client.analyzeSentiment(null))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that a {@link NullPointerException} is thrown when null document is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void analyzeSentimentNullInputWithLanguageHint() {
StepVerifier.create(client.analyzeSentiment(null, "en"))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that a {@link NullPointerException} is thrown when null documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
/**
* Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void analyzeSentimentBatchEmptyInputList() {
StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), null, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(exception -> {
assertEquals(IllegalArgumentException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE.equals(exception.getMessage()));
});
}
/**
* Verifies that a {@link NullPointerException} is thrown when null documents is given for
* {@link TextAnalyticsAsyncClient
*/
@Test
public void analyzeSentimentBatchNullInputWithLanguageHint() {
StepVerifier.create(client.analyzeSentimentBatch(null, "en", new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(exception -> {
assertEquals(NullPointerException.class, exception.getClass());
assertTrue(INVALID_DOCUMENT_BATCH_NPE_MESSAGE.equals(exception.getMessage()));
});
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
@Test
public void analyzeSentimentBatchEmptyInputListWithLanguageHint() {
    StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), "en", new TextAnalyticsRequestOptions()))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}
 * with statistics enabled.
 */
@Test
public void analyzeSentimentBatchNullInputWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.analyzeSentimentBatch(null, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of documents is given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}
 * with statistics enabled.
 */
@Test
public void analyzeSentimentBatchEmptyInputListWithLanguageHintAndRequestOptions() {
    StepVerifier.create(client.analyzeSentimentBatch(Collections.emptyList(), "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that a {@link NullPointerException} is thrown when null documents are given for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
@Test
public void analyzeSentimentBatchNullInputWithMaxOverload() {
    StepVerifier.create(client.analyzeSentimentBatchWithResponse(null,
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(NullPointerException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_BATCH_NPE_MESSAGE, exception.getMessage());
        });
}
/**
 * Verifies that an {@link IllegalArgumentException} is thrown when an empty list of {@link TextDocumentInput} is
 * given for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
@Test
public void analyzeSentimentEmptyInputListWithMaxOverload() {
    StepVerifier.create(client.analyzeSentimentBatchWithResponse(Collections.emptyList(),
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)))
        .verifyErrorSatisfies(exception -> {
            assertEquals(IllegalArgumentException.class, exception.getClass());
            assertEquals(INVALID_DOCUMENT_EMPTY_LIST_EXCEPTION_MESSAGE, exception.getMessage());
        });
}
} |
We begin by printing the size of each sentiment bucket; the for loop then iterates over all mined opinions collected in that bucket. | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size());
for (MinedOpinion positiveMinedOpinion : positiveMinedOpinions) {
System.out.printf("\tAspect: %s%n", positiveMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : positiveMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Mixed aspects count: %d%n", mixedMinedOpinions.size());
for (MinedOpinion mixedMinedOpinion : mixedMinedOpinions) {
System.out.printf("\tAspect: %s%n", mixedMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : mixedMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
} | System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size()); | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} |
This should be neutral, not mixed; I will fix it. | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
} | List<MinedOpinion> mixedMinedOpinions = new ArrayList<>(); | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
 * Code snippet for creating a {@link TextAnalyticsAsyncClient}.
 *
 * @return The TextAnalyticsAsyncClient object.
 */
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
    // Return the built client directly; the intermediate local variable added nothing.
    return new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildAsyncClient();
}
/**
 * Code snippet for updating the existing API key.
 */
public void rotateAzureKeyCredential() {
    AzureKeyCredential credential = new AzureKeyCredential("{key}");
    TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
        .credential(credential)
        .endpoint("{endpoint}")
        .buildAsyncClient();
    // Rotating the key on the credential object takes effect for the already-built client;
    // no rebuild is required.
    credential.update("{new_api_key}");
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String)}.
 */
public void detectLanguage() {
    String document = "Bonjour tout le monde";
    textAnalyticsAsyncClient.detectLanguage(document)
        .subscribe(language -> System.out.printf(
            "Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String, String)} with a country hint.
 */
public void detectLanguageWithCountryHint() {
    String document = "This text is in English";
    String countryHint = "US";
    textAnalyticsAsyncClient.detectLanguage(document, countryHint)
        .subscribe(language -> System.out.printf(
            "Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // "US" is the country hint applied to every document; null request options use the service defaults.
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
        batchResult -> {
            // Batch-level statistics, then the primary detected language per document.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (DetectLanguageResult detectLanguageResult : batchResult) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void detectBatchLanguagesMaxOverload() {
    // Each input carries its own id and per-document country hint.
    List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
    );
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload also exposes the raw HTTP response.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            DetectLanguageResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (DetectLanguageResult detectLanguageResult : resultCollection) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String)}.
 */
public void recognizeEntities() {
    String document = "Satya Nadella is the CEO of Microsoft";
    textAnalyticsAsyncClient.recognizeEntities(document)
        .subscribe(entityCollection -> entityCollection.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String, String)}.
 */
public void recognizeEntitiesWithLanguage() {
    String document = "Satya Nadella is the CEO of Microsoft";
    // "en" is the language hint for the single document.
    textAnalyticsAsyncClient.recognizeEntities(document, "en")
        .subscribe(entityCollection -> entityCollection.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(),
                entity.getCategory(),
                entity.getConfidenceScore())));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // null request options use the service defaults.
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload also exposes the raw HTTP response.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String)}.
 */
public void recognizeLinkedEntities() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String, String)}.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // null request options use the service defaults.
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchLinkedEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            resultCollection.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    // NOTE(review): this snippet prints the score with %.2f while the sibling
                    // snippets above use %f — confirm which precision the docs intend.
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %.2f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String)}.
 */
public void extractKeyPhrases() {
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde")
        .subscribe(phrase -> System.out.printf("%s.%n", phrase));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String, String)}.
 */
public void extractKeyPhrasesWithLanguage() {
    System.out.println("Extracted phrases:");
    // "fr" is the language hint for the single document.
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
        .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    // null request options use the service defaults.
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
        extractKeyPhraseResults -> {
            TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                System.out.println("Extracted phrases:");
                extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
            });
        });
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload also exposes the raw HTTP response.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
                System.out.println("Extracted phrases:");
                for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
                    System.out.printf("%s.%n", keyPhrase);
                }
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String)}.
 */
public void analyzeSentiment() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
        System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
        documentSentiment.getSentences().forEach(sentenceSentiment -> {
            SentimentConfidenceScores scores = sentenceSentiment.getConfidenceScores();
            System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
                sentenceSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
        });
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String)}.
 */
public void analyzeSentimentWithLanguage() {
    String document = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(document, "en")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
            documentSentiment.getSentences().forEach(sentenceSentiment -> {
                SentimentConfidenceScores scores = sentenceSentiment.getConfidenceScores();
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
                    sentenceSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
            });
        });
}
/**
 * NOTE(review): the snippet method that belonged to the first of these two truncated javadoc
 * blocks is missing here — presumably extracted elsewhere; restore it from source history.
 */
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
public void analyzeSentimentStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    // The explicit cast disambiguates the TextAnalyticsRequestOptions overload from the
    // AnalyzeSentimentOptions one when passing null.
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
        response -> {
            // Batch-level statistics first, then per-document and per-sentence sentiment.
            TextDocumentBatchStatistics batchStatistics = response.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            response.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                        + "neutral score: %.2f, negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative()));
            });
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}
 * with opinion mining enabled.
 */
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
        response -> {
            TextDocumentBatchStatistics batchStatistics = response.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Bucket every mined opinion across all documents by the sentiment of its aspect.
            List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
            List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
            List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
            response.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                        TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                        if (NEGATIVE.equals(aspectTextSentiment)) {
                            negativeMinedOpinions.add(minedOpinion);
                        } else if (POSITIVE.equals(aspectTextSentiment)) {
                            positiveMinedOpinions.add(minedOpinion);
                        } else if (MIXED.equals(aspectTextSentiment)) {
                            mixedMinedOpinions.add(minedOpinion);
                        }
                    }));
            });
            // Previously only the negative bucket was printed; the positive and mixed buckets
            // were populated but never used (dead stores). Report all three.
            printMinedOpinions("Positive", positiveMinedOpinions);
            printMinedOpinions("Mixed", mixedMinedOpinions);
            printMinedOpinions("Negative", negativeMinedOpinions);
        });
}

/** Prints the aspect text and each opinion for every mined opinion in the given bucket. */
private void printMinedOpinions(String label, List<MinedOpinion> minedOpinions) {
    System.out.printf("%s aspects count: %d%n", label, minedOpinions.size());
    for (MinedOpinion minedOpinion : minedOpinions) {
        System.out.printf("\tAspect: %s%n", minedOpinion.getAspect().getText());
        for (OpinionSentiment opinionSentiment : minedOpinion.getOpinions()) {
            System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
        }
    }
}
/**
 * Code snippet for
 * {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload also exposes the raw HTTP response.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                        + "neutral score: %.2f, negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative()));
            });
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true)
.setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
});
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
I am wrong. AspectSentiment and OpinionSentiment could have positive, negative, and mixed. So it should keep as it is. | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
} | List<MinedOpinion> mixedMinedOpinions = new ArrayList<>(); | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a list of documents,
     * a country hint, and default (null) request options.
     */
    public void detectLanguageStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "This is written in English",
            "Este es un documento escrito en Español."
        );
        // Null options: service defaults are used for model version and statistics.
        textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
            batchResult -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Per-document results
                for (DetectLanguageResult detectLanguageResult : batchResult) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void detectBatchLanguagesMaxOverload() {
        // Each input carries its own id and country hint.
        List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
            new DetectLanguageInput("1", "This is written in English.", "US"),
            new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
        );
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                DetectLanguageResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                for (DetectLanguageResult detectLanguageResult : resultCollection) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities} with a single document.
     */
    public void recognizeEntities() {
        String document = "Satya Nadella is the CEO of Microsoft";
        textAnalyticsAsyncClient.recognizeEntities(document)
            .subscribe(entityCollection -> entityCollection.forEach(entity ->
                System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities} with a document and a language hint.
     */
    public void recognizeEntitiesWithLanguage() {
        String document = "Satya Nadella is the CEO of Microsoft";
        textAnalyticsAsyncClient.recognizeEntities(document, "en")
            .subscribe(entityCollection -> entityCollection.forEach(entity ->
                System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a list of documents,
     * a language hint, and default (null) request options.
     */
    public void recognizeEntitiesStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
        textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
            .subscribe(batchResult -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                batchResult.forEach(recognizeEntitiesResult ->
                    recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                        entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void recognizeBatchEntitiesMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
            new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                RecognizeEntitiesResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                resultCollection.forEach(recognizeEntitiesResult ->
                    recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                        entity.getText(),
                        entity.getCategory(),
                        entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities} with a single document.
     */
    public void recognizeLinkedEntities() {
        String document = "Old Faithful is a geyser at Yellowstone Park.";
        textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
            linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
                System.out.println("Linked Entities:");
                System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                    linkedEntity.getDataSource());
                linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                    "Matched entity: %s, confidence score: %f.%n",
                    entityMatch.getText(), entityMatch.getConfidenceScore()));
            }));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities} with a document
     * and a language hint.
     */
    public void recognizeLinkedEntitiesWithLanguage() {
        String document = "Old Faithful is a geyser at Yellowstone Park.";
        textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
            linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
                System.out.println("Linked Entities:");
                System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                    linkedEntity.getDataSource());
                linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                    "Matched entity: %s, confidence score: %f.%n",
                    entityMatch.getText(), entityMatch.getConfidenceScore()));
            }));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch} with a list of
     * documents, a language hint, and default (null) request options.
     */
    public void recognizeLinkedEntitiesStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "Old Faithful is a geyser at Yellowstone Park.",
            "Mount Shasta has lenticular clouds."
        );
        textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
            .subscribe(batchResult -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                batchResult.forEach(recognizeLinkedEntitiesResult ->
                    recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                        System.out.println("Linked Entities:");
                        System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                            linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                            linkedEntity.getDataSource());
                        linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                            "Matched entity: %s, confidence score: %f.%n",
                            entityMatch.getText(), entityMatch.getConfidenceScore()));
                    }));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void recognizeBatchLinkedEntitiesMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
            new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                resultCollection.forEach(recognizeLinkedEntitiesResult ->
                    recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                        System.out.println("Linked Entities:");
                        System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                            linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                            linkedEntity.getDataSource());
                        linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                            "Matched entity: %s, confidence score: %.2f.%n",
                            entityMatch.getText(), entityMatch.getConfidenceScore()));
                    }));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases} with a single document.
     */
    public void extractKeyPhrases() {
        System.out.println("Extracted phrases:");
        textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
            System.out.printf("%s.%n", keyPhrase));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases} with a document and a language hint.
     */
    public void extractKeyPhrasesWithLanguage() {
        System.out.println("Extracted phrases:");
        textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
            .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch} with a list of documents,
     * a language hint, and default (null) request options.
     */
    public void extractKeyPhrasesStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "Hello world. This is some input text that I love.",
            "Bonjour tout le monde");
        textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
            extractKeyPhraseResults -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                    System.out.println("Extracted phrases:");
                    extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
                });
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void extractBatchKeyPhrasesMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
            new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
                    System.out.println("Extracted phrases:");
                    for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
                        System.out.printf("%s.%n", keyPhrase);
                    }
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment} with a single document.
     */
    public void analyzeSentiment() {
        String document = "The hotel was dark and unclean.";
        textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
            System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
            // Sentence-level sentiment with per-label confidence scores.
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                        + "negative score: %.2f.%n",
                    sentenceSentiment.getSentiment(),
                    sentenceSentiment.getConfidenceScores().getPositive(),
                    sentenceSentiment.getConfidenceScores().getNeutral(),
                    sentenceSentiment.getConfidenceScores().getNegative());
            }
        });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment} with a document and a language hint.
     */
    public void analyzeSentimentWithLanguage() {
        String document = "The hotel was dark and unclean.";
        textAnalyticsAsyncClient.analyzeSentiment(document, "en")
            .subscribe(documentSentiment -> {
                System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
                for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                            + "negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative());
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with a list of documents,
     * a language hint, and default (null) request options.
     */
    public void analyzeSentimentStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "The hotel was dark and unclean.",
            "The restaurant had amazing gnocchi."
        );
        // Cast disambiguates between the TextAnalyticsRequestOptions and AnalyzeSentimentOptions overloads.
        textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
            response -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = response.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                response.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                    documentSentiment.getSentences().forEach(sentenceSentiment ->
                        System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                                + "neutral score: %.2f, negative score: %.2f.%n",
                            sentenceSentiment.getSentiment(),
                            sentenceSentiment.getConfidenceScores().getPositive(),
                            sentenceSentiment.getConfidenceScores().getNeutral(),
                            sentenceSentiment.getConfidenceScores().getNegative()));
                });
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch} with opinion mining enabled
     * via {@link AnalyzeSentimentOptions}.
     */
    public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
        List<String> documents = Arrays.asList(
            "The hotel was dark and unclean.",
            "The restaurant had amazing gnocchi."
        );
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
        textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
            response -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = response.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Bucket mined opinions by the sentiment of their aspect.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                response.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            // NEGATIVE/POSITIVE/MIXED are presumably statically imported TextSentiment constants.
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void analyzeBatchSentimentMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
            new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                AnalyzeSentimentResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(),
                    batchStatistics.getValidDocumentCount());
                resultCollection.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                    documentSentiment.getSentences().forEach(sentenceSentiment ->
                        System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                                + "neutral score: %.2f, negative score: %.2f.%n",
                            sentenceSentiment.getSentiment(),
                            sentenceSentiment.getConfidenceScores().getPositive(),
                            sentenceSentiment.getConfidenceScores().getNeutral(),
                            sentenceSentiment.getConfidenceScores().getNegative()));
                });
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse} with
     * opinion mining and statistics enabled via {@link AnalyzeSentimentOptions}.
     */
    public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
            new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
            .setIncludeOpinionMining(true)
            .setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
        textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                AnalyzeSentimentResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(),
                    batchStatistics.getValidDocumentCount());
                // Bucket mined opinions by the sentiment of their aspect.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                resultCollection.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            // NEGATIVE/POSITIVE/MIXED are presumably statically imported TextSentiment constants.
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
    // Shared client instance used by every snippet method in this class.
    TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
    /**
     * Code snippet for creating a {@link TextAnalyticsAsyncClient}.
     *
     * @return The TextAnalyticsAsyncClient object
     */
    public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
        // "{key}" and "{endpoint}" are placeholders the reader substitutes with real values.
        TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
            .credential(new AzureKeyCredential("{key}"))
            .endpoint("{endpoint}")
            .buildAsyncClient();
        return textAnalyticsAsyncClient;
    }
    /**
     * Code snippet for updating the existing API key.
     */
    public void rotateAzureKeyCredential() {
        AzureKeyCredential credential = new AzureKeyCredential("{key}");
        TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
            .credential(credential)
            .endpoint("{endpoint}")
            .buildAsyncClient();
        // Updating the credential in place rotates the key for already-built clients.
        credential.update("{new_api_key}");
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage} with a single document.
     */
    public void detectLanguage() {
        String document = "Bonjour tout le monde";
        textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
            System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage} with a document and a country hint.
     */
    public void detectLanguageWithCountryHint() {
        String document = "This text is in English";
        // ISO 3166-1 alpha-2 country code used to bias language detection.
        String countryHint = "US";
        textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
            System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch} with a list of documents,
     * a country hint, and default (null) request options.
     */
    public void detectLanguageStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "This is written in English",
            "Este es un documento escrito en Español."
        );
        // Null options: service defaults are used for model version and statistics.
        textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
            batchResult -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Per-document results
                for (DetectLanguageResult detectLanguageResult : batchResult) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void detectBatchLanguagesMaxOverload() {
        // Each input carries its own id and country hint.
        List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
            new DetectLanguageInput("1", "This is written in English.", "US"),
            new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
        );
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                DetectLanguageResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                for (DetectLanguageResult detectLanguageResult : resultCollection) {
                    DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                    System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                        detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                        detectedLanguage.getConfidenceScore());
                }
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities} with a single document.
     */
    public void recognizeEntities() {
        String document = "Satya Nadella is the CEO of Microsoft";
        textAnalyticsAsyncClient.recognizeEntities(document)
            .subscribe(entityCollection -> entityCollection.forEach(entity ->
                System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities} with a document and a language hint.
     */
    public void recognizeEntitiesWithLanguage() {
        String document = "Satya Nadella is the CEO of Microsoft";
        textAnalyticsAsyncClient.recognizeEntities(document, "en")
            .subscribe(entityCollection -> entityCollection.forEach(entity ->
                System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch} with a list of documents,
     * a language hint, and default (null) request options.
     */
    public void recognizeEntitiesStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
        textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
            .subscribe(batchResult -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                batchResult.forEach(recognizeEntitiesResult ->
                    recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                        entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void recognizeBatchEntitiesMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
            new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                RecognizeEntitiesResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                resultCollection.forEach(recognizeEntitiesResult ->
                    recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                        entity.getText(),
                        entity.getCategory(),
                        entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities} with a single document.
     */
    public void recognizePiiEntities() {
        String document = "My SSN is 859-98-0987";
        textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
            piiEntityCollection.forEach(entity -> System.out.printf(
                "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                    + " entity subcategory: %s, confidence score: %f.%n",
                entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities} with a document and a language hint.
     */
    public void recognizePiiEntitiesWithLanguage() {
        String document = "My SSN is 859-98-0987";
        textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
            .subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
                "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                    + " entity subcategory: %s, confidence score: %f.%n",
                entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatch} with a list of documents,
     * a language hint, and explicit request options.
     */
    public void recognizePiiEntitiesStringListWithOptions() {
        List<String> documents = Arrays.asList(
            "My SSN is 859-98-0987.",
            "Visa card 0111 1111 1111 1111."
        );
        // Request statistics and pin the model version used for recognition.
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
            .setModelVersion("latest");
        textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
            .subscribe(piiEntitiesResults -> {
                // Batch statistics
                TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
                    recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                            + " entity subcategory: %s, confidence score: %f.%n",
                        entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatchWithResponse} with
     * per-document inputs and request options.
     */
    public void recognizeBatchPiiEntitiesMaxOverload() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "My SSN is 859-98-0987."),
            new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
        TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
        textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
            .subscribe(response -> {
                RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
                TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
                    recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                        "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                            + " entity subcategory: %s, confidence score: %f.%n",
                        entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
            });
    }
    /**
     * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities} with a single document.
     */
    public void recognizeLinkedEntities() {
        String document = "Old Faithful is a geyser at Yellowstone Park.";
        textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
            linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
                System.out.println("Linked Entities:");
                System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                    linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                    linkedEntity.getDataSource());
                linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                    "Matched entity: %s, confidence score: %f.%n",
                    entityMatch.getText(), entityMatch.getConfidenceScore()));
            }));
    }
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
    // Same as the single-argument overload, but with an explicit "en" language hint.
    textAnalyticsAsyncClient.recognizeLinkedEntities("Old Faithful is a geyser at Yellowstone Park.", "en")
        .subscribe(entities -> entities.forEach(entity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(), entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        }));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // "en" is the language hint applied to every document; null uses default request options.
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: each linked entity plus the text spans that matched it.
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
    // Request batch statistics so transaction/document counts can be reported below.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: each linked entity plus the text spans that matched it.
            resultCollection.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %.2f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
    // Print a header, then each key phrase emitted for the document.
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde")
        .subscribe(phrase -> System.out.printf("%s.%n", phrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
    // Same as the single-argument overload, but with an explicit "fr" language hint.
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr").subscribe(phrase ->
        System.out.printf("%s.%n", phrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    // "en" is the language hint applied to every document; null uses default request options.
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
        extractKeyPhraseResults -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: print every extracted key phrase.
            extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
                System.out.println("Extracted phrases:");
                extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
            });
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch statistics so transaction/document counts can be reported below.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: print every extracted key phrase.
            for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
                System.out.println("Extracted phrases:");
                for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
                    System.out.printf("%s.%n", keyPhrase);
                }
            }
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
    // Analyze a single document, then print the document-level label and each sentence's scores.
    textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
            documentSentiment.getSentences().forEach(sentence ->
                System.out.printf(
                    "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                        + "negative score: %.2f.%n",
                    sentence.getSentiment(),
                    sentence.getConfidenceScores().getPositive(),
                    sentence.getConfidenceScores().getNeutral(),
                    sentence.getConfidenceScores().getNegative()));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
    String document = "The hotel was dark and unclean.";
    // "en" is the language hint for the document.
    textAnalyticsAsyncClient.analyzeSentiment(document, "en")
        .subscribe(documentSentiment -> {
            System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
            // Per-sentence sentiment with confidence scores for all three labels.
            for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
                System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                        + "negative score: %.2f.%n",
                    sentenceSentiment.getSentiment(),
                    sentenceSentiment.getConfidenceScores().getPositive(),
                    sentenceSentiment.getConfidenceScores().getNeutral(),
                    sentenceSentiment.getConfidenceScores().getNegative());
            }
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient
 */
public void analyzeSentimentStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    // "en" is the language hint; statistics are requested so batch counts can be printed.
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
        new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
            response -> {
                // Batch-level statistics for the whole request.
                TextDocumentBatchStatistics batchStatistics = response.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Per-document results: document-level label plus per-sentence confidence scores.
                response.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                    documentSentiment.getSentences().forEach(sentenceSentiment ->
                        System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                                + "neutral score: %.2f, negative score: %.2f.%n",
                            sentenceSentiment.getSentiment(),
                            sentenceSentiment.getConfidenceScores().getPositive(),
                            sentenceSentiment.getConfidenceScores().getNeutral(),
                            sentenceSentiment.getConfidenceScores().getNegative()));
                });
            });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
    List<String> documents = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    // Opinion mining adds aspect/opinion pairs to each sentence's sentiment result.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
    textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
        response -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = response.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            response.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
                    // Each mined opinion pairs an aspect (e.g. "hotel") with the opinions about it.
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
                        AspectSentiment aspectSentiment = minedOpinions.getAspect();
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            aspectSentiment.getSentiment(), aspectSentiment.getText());
                        for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
                            System.out.printf(
                                "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                                opinionSentiment.getSentiment(), opinionSentiment.getText(),
                                opinionSentiment.isNegated());
                        }
                    });
                });
            });
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // Request batch statistics so transaction/document counts can be reported below.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            // Per-document results: document-level label plus per-sentence confidence scores.
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                documentSentiment.getSentences().forEach(sentenceSentiment ->
                    System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
                            + "neutral score: %.2f, negative score: %.2f.%n",
                        sentenceSentiment.getSentiment(),
                        sentenceSentiment.getConfidenceScores().getPositive(),
                        sentenceSentiment.getConfidenceScores().getNeutral(),
                        sentenceSentiment.getConfidenceScores().getNegative()));
            });
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // Enable both opinion mining (aspect/opinion pairs) and batch statistics.
    AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
        .setIncludeOpinionMining(true).setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(),
                batchStatistics.getValidDocumentCount());
            resultCollection.forEach(analyzeSentimentResult -> {
                System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                documentSentiment.getSentences().forEach(sentenceSentiment -> {
                    System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
                    // Each mined opinion pairs an aspect (e.g. "hotel") with the opinions about it.
                    sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
                        AspectSentiment aspectSentiment = minedOpinions.getAspect();
                        System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
                            aspectSentiment.getSentiment(), aspectSentiment.getText());
                        for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
                            System.out.printf(
                                "\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
                                opinionSentiment.getSentiment(), opinionSentiment.getText(),
                                opinionSentiment.isNegated());
                        }
                    });
                });
            });
        });
}
} |
ignore my last comment. Changed the sample scenarios already. | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
});
System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size());
for (MinedOpinion positiveMinedOpinion : positiveMinedOpinions) {
System.out.printf("\tAspect: %s%n", positiveMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : positiveMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Mixed aspects count: %d%n", mixedMinedOpinions.size());
for (MinedOpinion mixedMinedOpinion : mixedMinedOpinions) {
System.out.printf("\tAspect: %s%n", mixedMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : mixedMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
} | System.out.printf("Positive aspects count: %d%n", positiveMinedOpinions.size()); | public static void main(String[] args) {
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
String document = "Bad atmosphere. Not close to plenty of restaurants, hotels, and transit! Staff are not friendly and helpful.";
System.out.printf("Text = %s%n", document);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
final DocumentSentiment documentSentiment = client.analyzeSentiment(document, "en", options);
SentimentConfidenceScores scores = documentSentiment.getConfidenceScores();
System.out.printf(
"Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores();
System.out.printf("\tSentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n",
sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} | class AnalyzeSentimentWithOpinionMining {
/**
* Main method to invoke this demo about how to analyze the sentiment of document.
*
* @param args Unused arguments to the program.
*/
} |
updated | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
if (NEGATIVE.equals(aspectTextSentiment)) {
negativeMinedOpinions.add(minedOpinion);
} else if (POSITIVE.equals(aspectTextSentiment)) {
positiveMinedOpinions.add(minedOpinion);
} else if (MIXED.equals(aspectTextSentiment)) {
mixedMinedOpinions.add(minedOpinion);
}
});
}
System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
}
});
} | mixedMinedOpinions.add(minedOpinion); | public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
    // Build and return the async client directly; no intermediate local is needed.
    return new TextAnalyticsClientBuilder()
        .credential(new AzureKeyCredential("{key}"))
        .endpoint("{endpoint}")
        .buildAsyncClient();
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
    AzureKeyCredential credential = new AzureKeyCredential("{key}");
    TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
        .credential(credential)
        .endpoint("{endpoint}")
        .buildAsyncClient();
    // Rotating the key on the shared credential object updates the already-built client in place.
    credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
    // Detect the language of a single document and print its name, ISO code and score.
    textAnalyticsAsyncClient.detectLanguage("Bonjour tout le monde").subscribe(language ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
    // The "US" country hint helps the service disambiguate the document's language.
    textAnalyticsAsyncClient.detectLanguage("This text is in English", "US").subscribe(language ->
        System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
            language.getName(), language.getIso6391Name(), language.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "This is written in English",
        "Este es un documento escrito en Español."
    );
    // "US" is the country hint applied to every document; null uses default request options.
    textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
        batchResult -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: the single most likely language for each document.
            for (DetectLanguageResult detectLanguageResult : batchResult) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
    // Each input carries its own id and country hint.
    List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "US"),
        new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
    );
    // Request batch statistics so transaction/document counts can be reported below.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            DetectLanguageResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: the single most likely language for each document.
            for (DetectLanguageResult detectLanguageResult : resultCollection) {
                DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
                System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
                    detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
                    detectedLanguage.getConfidenceScore());
            }
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
    // Recognize categorized entities in a single document and print each one.
    textAnalyticsAsyncClient.recognizeEntities("Satya Nadella is the CEO of Microsoft")
        .subscribe(entities -> entities.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
    // Same as the single-argument overload, but with an explicit "en" language hint.
    textAnalyticsAsyncClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en")
        .subscribe(entities -> entities.forEach(entity ->
            System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
    // "en" is the language hint applied to every document; null uses default request options.
    textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: each categorized entity with its category and score.
            batchResult.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
    List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch statistics so transaction/document counts can be reported below.
    TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
        .subscribe(response -> {
            // The *WithResponse overload exposes the raw HTTP status alongside the payload.
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeEntitiesResultCollection resultCollection = response.getValue();
            TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: each categorized entity with its category and score.
            resultCollection.forEach(recognizeEntitiesResult ->
                recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
                    "Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
                    entity.getText(),
                    entity.getCategory(),
                    entity.getConfidenceScore())));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            // Each linked entity points back to an entry in a well-known data source.
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            // The matches are the text spans in the document that referred to this entity.
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
    String document = "Old Faithful is a geyser at Yellowstone Park.";
    // "en" is the language hint for the document.
    textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
        linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                linkedEntity.getDataSource());
            // The matches are the text spans in the document that referred to this entity.
            linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                entityMatch.getText(), entityMatch.getConfidenceScore()));
        }));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
    List<String> documents = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    // "en" is the language hint applied to every document; null uses default request options.
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
        .subscribe(batchResult -> {
            // Batch-level statistics for the whole request.
            TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
            // Per-document results: each linked entity plus the text spans that matched it.
            batchResult.forEach(recognizeLinkedEntitiesResult ->
                recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
                    System.out.println("Linked Entities:");
                    System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                        linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
                        linkedEntity.getDataSource());
                    linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
                        "Matched entity: %s, confidence score: %f.%n",
                        entityMatch.getText(), entityMatch.getConfidenceScore()));
                }));
        });
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", (TextAnalyticsRequestOptions) null).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
    /**
     * Code snippet demonstrating {@link TextAnalyticsAsyncClient} batch sentiment analysis
     * for a list of documents with opinion mining enabled.
     */
    public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
        List<String> documents = Arrays.asList(
            "The hotel was dark and unclean.",
            "The restaurant had amazing gnocchi."
        );
        // Opinion mining must be requested explicitly; it is off by default.
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
        textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
            response -> {
                TextDocumentBatchStatistics batchStatistics = response.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
                // Bucket each mined opinion by the sentiment of its aspect.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                response.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                // Report only the negative aspects and the opinions behind them.
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
    /**
     * Code snippet demonstrating {@link TextAnalyticsAsyncClient} batch sentiment analysis with
     * opinion mining enabled and full {@code Response} access.
     */
    public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
        List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
            new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
            new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
        // Opinion mining is off by default; batch statistics are requested through the
        // nested TextAnalyticsRequestOptions.
        AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
            .setIncludeOpinionMining(true)
            .setRequestOptions(new TextAnalyticsRequestOptions().setIncludeStatistics(true));
        textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
            .subscribe(response -> {
                System.out.printf("Status code of request response: %d%n", response.getStatusCode());
                AnalyzeSentimentResultCollection resultCollection = response.getValue();
                TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
                System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                    batchStatistics.getTransactionCount(),
                    batchStatistics.getValidDocumentCount());
                // Bucket each mined opinion by the sentiment of its aspect.
                List<MinedOpinion> positiveMinedOpinions = new ArrayList<>();
                List<MinedOpinion> mixedMinedOpinions = new ArrayList<>();
                List<MinedOpinion> negativeMinedOpinions = new ArrayList<>();
                resultCollection.forEach(analyzeSentimentResult -> {
                    System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
                    DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
                    documentSentiment.getSentences().forEach(sentenceSentiment -> {
                        sentenceSentiment.getMinedOpinions().forEach(minedOpinion -> {
                            TextSentiment aspectTextSentiment = minedOpinion.getAspect().getSentiment();
                            if (NEGATIVE.equals(aspectTextSentiment)) {
                                negativeMinedOpinions.add(minedOpinion);
                            } else if (POSITIVE.equals(aspectTextSentiment)) {
                                positiveMinedOpinions.add(minedOpinion);
                            } else if (MIXED.equals(aspectTextSentiment)) {
                                mixedMinedOpinions.add(minedOpinion);
                            }
                        });
                    });
                });
                // Report only the negative aspects and the opinions behind them.
                System.out.printf("Negative aspects count: %d%n", negativeMinedOpinions.size());
                for (MinedOpinion negativeMinedOpinion : negativeMinedOpinions) {
                    System.out.printf("\tAspect: %s%n", negativeMinedOpinion.getAspect().getText());
                    for (OpinionSentiment opinionSentiment : negativeMinedOpinion.getOpinions()) {
                        System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
                            opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
                    }
                }
            });
    }
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
Did you have to change this because of an error in the compiler or something? | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe( | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
it is the opinion that is negated, not the aspect :) | public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
.setIncludeStatistics(true);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs, options, Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
} | System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n", | public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true)
.setIncludeStatistics(true);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs, options, Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
} | class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguage() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageWithCountryHint() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(
"This text is in English", "US");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
DetectLanguageResultCollection resultCollection =
textAnalyticsClient.detectLanguageBatch(documents, "US", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
);
Response<DetectLanguageResultCollection> response =
textAnalyticsClient.detectLanguageBatchWithResponse(detectLanguageInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection detectedLanguageResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = detectedLanguageResultCollection.getStatistics();
System.out.printf(
"Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
+ " valid document count = %s.%n",
batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(),
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
detectedLanguageResultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntities() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesWithLanguage() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
RecognizeEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
);
Response<RecognizeEntitiesResultCollection> response =
textAnalyticsClient.recognizeEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection recognizeEntitiesResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
recognizeEntitiesResultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntities() {
final String document = "Old Faithful is a geyser at Yellowstone Park.";
System.out.println("Linked Entities:");
textAnalyticsClient.recognizeLinkedEntities(document).forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsClient.recognizeLinkedEntities(document, "en").forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
RecognizeLinkedEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesBatchMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
);
Response<RecognizeLinkedEntitiesResultCollection> response =
textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
for (String keyPhrase : textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")) {
System.out.printf("%s.%n", keyPhrase);
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")
.forEach(kegPhrase -> System.out.printf("%s.%n", kegPhrase));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
ExtractKeyPhrasesResultCollection resultCollection =
textAnalyticsClient.extractKeyPhrasesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
);
Response<ExtractKeyPhrasesResultCollection> response =
textAnalyticsClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentiment() {
final DocumentSentiment documentSentiment =
textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguage() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection =
textAnalyticsClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions());
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
} | class TextAnalyticsClientJavaDocCodeSnippets {
private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient();
/**
* Code snippet for creating a {@link TextAnalyticsClient} with pipeline
*/
public void createTextAnalyticsClientWithPipeline() {
HttpPipeline pipeline = new HttpPipelineBuilder()
.policies(/* add policies */)
.build();
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.pipeline(pipeline)
.buildClient();
}
/**
* Code snippet for creating a {@link TextAnalyticsClient}
*/
public void createTextAnalyticsClient() {
TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildClient();
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguage() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage("Bonjour tout le monde");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageWithCountryHint() {
DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(
"This text is in English", "US");
System.out.printf("Detected language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore());
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
DetectLanguageResultCollection resultCollection =
textAnalyticsClient.detectLanguageBatch(documents, "US", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "es")
);
Response<DetectLanguageResultCollection> response =
textAnalyticsClient.detectLanguageBatchWithResponse(detectLanguageInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection detectedLanguageResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = detectedLanguageResultCollection.getStatistics();
System.out.printf(
"Documents statistics: document count = %s, erroneous document count = %s, transaction count = %s,"
+ " valid document count = %s.%n",
batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(),
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
detectedLanguageResultCollection.forEach(detectLanguageResult -> {
System.out.printf("Document ID: %s%n", detectLanguageResult.getId());
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Primary language name: %s, ISO 6391 name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntities() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesWithLanguage() {
final CategorizedEntityCollection recognizeEntitiesResult =
textAnalyticsClient.recognizeEntities("Satya Nadella is the CEO of Microsoft", "en");
for (CategorizedEntity entity : recognizeEntitiesResult) {
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
RecognizeEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en")
);
Response<RecognizeEntitiesResultCollection> response =
textAnalyticsClient.recognizeEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection recognizeEntitiesResultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
recognizeEntitiesResultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity ->
System.out.printf("Recognized entity: %s, entity category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntities() {
for (PiiEntity entity : textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987")) {
System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore());
}
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntitiesWithLanguage() {
textAnalyticsClient.recognizePiiEntities("My SSN is 859-98-0987", "en")
.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987",
"Visa card 4111 1111 1111 1111"
);
RecognizePiiEntitiesResultCollection resultCollection = textAnalyticsClient.recognizePiiEntitiesBatch(
documents, "en", new TextAnalyticsRequestOptions().setIncludeStatistics(true));
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987"),
new TextDocumentInput("1", "Visa card 4111 1111 1111 1111")
);
Response<RecognizePiiEntitiesResultCollection> response =
textAnalyticsClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
RecognizePiiEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntities() {
final String document = "Old Faithful is a geyser at Yellowstone Park.";
System.out.println("Linked Entities:");
textAnalyticsClient.recognizeLinkedEntities(document).forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsClient.recognizeLinkedEntities(document, "en").forEach(linkedEntity -> {
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
RecognizeLinkedEntitiesResultCollection resultCollection =
textAnalyticsClient.recognizeLinkedEntitiesBatch(documents, "en", null);
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void recognizeLinkedEntitiesBatchMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("2", "Mount Shasta has lenticular clouds.").setLanguage("en")
);
Response<RecognizeLinkedEntitiesResultCollection> response =
textAnalyticsClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
for (String keyPhrase : textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.")) {
System.out.printf("%s.%n", keyPhrase);
}
}
/**
* Code snippet for {@link TextAnalyticsClient#extractKeyPhrases(String, String)}.
* Extracts key phrases from a single document with an explicit language hint.
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
// Fixed: lambda parameter was misspelled "kegPhrase"; renamed for consistency with sibling snippets.
textAnalyticsClient.extractKeyPhrases("My cat might need to see a veterinarian.", "en")
.forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Demonstrates the options overload; statistics are requested so {@code getStatistics()} is populated.
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My cat might need to see a veterinarian.",
"The pitot tube is used to measure airspeed."
);
// Fixed: previously passed null options, yet the snippet reads getStatistics() below and its
// stated purpose is to show TextAnalyticsRequestOptions usage — request statistics explicitly.
ExtractKeyPhrasesResultCollection resultCollection =
textAnalyticsClient.extractKeyPhrasesBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true));
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient#extractKeyPhrasesBatchWithResponse(Iterable, TextAnalyticsRequestOptions, Context)}.
* Extracts key phrases for a batch of {@link TextDocumentInput} and prints the HTTP status and batch statistics.
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "My cat might need to see a veterinarian.").setLanguage("en"),
new TextDocumentInput("2", "The pitot tube is used to measure airspeed.").setLanguage("en")
);
// Statistics below are read only because setIncludeStatistics(true) is requested here.
Response<ExtractKeyPhrasesResultCollection> response =
textAnalyticsClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf(
"A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(extractKeyPhraseResult -> {
System.out.printf("Document ID: %s%n", extractKeyPhraseResult.getId());
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
});
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String)}.
* Prints the document-level sentiment and then each sentence-level sentiment with confidence scores.
*/
public void analyzeSentiment() {
final DocumentSentiment documentSentiment =
textAnalyticsClient.analyzeSentiment("The hotel was dark and unclean.");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String, String)}.
* Same as the single-argument overload but with an explicit "en" language hint.
*/
public void analyzeSentimentWithLanguage() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en");
System.out.printf(
"Recognized sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentiment(String, String, AnalyzeSentimentOptions)}.
* Enables opinion mining and prints, per sentence, each mined aspect and its opinions.
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
final DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(
"The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
// Mined opinions are available because setIncludeOpinionMining(true) was passed above.
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
}
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Analyzes sentiment for a batch of strings with statistics requested via the options parameter.
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
// Statistics below are read only because setIncludeStatistics(true) is requested here.
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new TextAnalyticsRequestOptions().setIncludeStatistics(true));
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}.
* Batch sentiment analysis with opinion mining; prints per-sentence aspects and opinions.
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean."
);
AnalyzeSentimentResultCollection resultCollection = textAnalyticsClient.analyzeSentimentBatch(
documents, "en", new AnalyzeSentimentOptions().setIncludeOpinionMining(true));
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
// Mined opinions are available because setIncludeOpinionMining(true) was passed above.
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n", aspectSentiment.getSentiment(),
aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(), opinionSentiment.isNegated());
}
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions, Context)}.
* Batch sentiment analysis over {@link TextDocumentInput}, printing HTTP status, batch statistics,
* and document/sentence sentiment with confidence scores.
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs = Arrays.asList(
new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.")
.setLanguage("en"),
new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.")
.setLanguage("en")
);
// Statistics below are read only because setIncludeStatistics(true) is requested here.
Response<AnalyzeSentimentResultCollection> response =
textAnalyticsClient.analyzeSentimentBatchWithResponse(textDocumentInputs,
new TextAnalyticsRequestOptions().setIncludeStatistics(true), Context.NONE);
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("A batch of documents statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf(
"Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
documentSentiment.getSentiment(),
documentSentiment.getConfidenceScores().getPositive(),
documentSentiment.getConfidenceScores().getNeutral(),
documentSentiment.getConfidenceScores().getNegative());
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f,"
+ " negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
});
});
}
/**
* Code snippet for {@link TextAnalyticsClient
*/
} |
After revisiting the purpose of this code snippet, I found that setting the TextAnalyticsRequestOptions parameter to null is wrong. The purpose of this code snippet is to show how to use the API with TextAnalyticsRequestOptions, so I will update it to include statistics info. | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe( | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
// "{key}" and "{endpoint}" are documentation placeholders to be replaced by the reader.
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
// Rotates the key on the same credential instance that was handed to the builder.
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String)}.
* Detects the primary language of a single document and prints name, ISO 639-1 code, and confidence.
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#detectLanguage(String, String)}.
* Detects the primary language of a single document using a country hint ("US").
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Demonstrates the options overload; statistics are requested so {@code getStatistics()} is populated.
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
// Fixed: previously passed null options, yet the snippet reads getStatistics() below and its
// stated purpose is to show TextAnalyticsRequestOptions usage — request statistics explicitly.
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#detectLanguageBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
* Detects languages for a batch of {@link DetectLanguageInput} with per-document country hints.
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
// Statistics below are read only because setIncludeStatistics(true) is requested here.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String)}.
* Recognizes categorized entities in a single document and prints text, category, and confidence.
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntities(String, String)}.
* Same as the single-argument overload but with an explicit "en" language hint.
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Demonstrates the options overload; statistics are requested so {@code getStatistics()} is populated.
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
// Fixed: previously passed null options, yet the snippet reads getStatistics() below and its
// stated purpose is to show TextAnalyticsRequestOptions usage — request statistics explicitly.
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true))
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
* Recognizes categorized entities for a batch of {@link TextDocumentInput} with statistics requested.
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
// Statistics below are read only because setIncludeStatistics(true) is requested here.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String)}.
* Recognizes linked entities in a single document and prints each entity with its matches.
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String, String)}.
* Same as the single-argument overload but with an explicit "en" language hint.
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Demonstrates the options overload; statistics are requested so {@code getStatistics()} is populated.
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
// Fixed: previously passed null options, yet the snippet reads getStatistics() below and its
// stated purpose is to show TextAnalyticsRequestOptions usage — request statistics explicitly.
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true))
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
* Recognizes linked entities for a batch of {@link TextDocumentInput} with statistics requested.
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
// Statistics below are read only because setIncludeStatistics(true) is requested here.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String)}.
* Prints each key phrase extracted from a single document.
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String, String)}.
* Extracts key phrases with an explicit "fr" language hint.
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch(Iterable, String, TextAnalyticsRequestOptions)}.
* Demonstrates the options overload; statistics are requested so {@code getStatistics()} is populated.
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
// Fixed: previously passed null options, yet the snippet reads getStatistics() below and its
// stated purpose is to show TextAnalyticsRequestOptions usage — request statistics explicitly.
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
* Extracts key phrases for a batch of {@link TextDocumentInput} with statistics requested.
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
// Statistics below are read only because setIncludeStatistics(true) is requested here.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String)}.
* Prints document-level sentiment and each sentence-level sentiment with confidence scores.
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String)}.
* Same as the single-argument overload but with an explicit "en" language hint.
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String, AnalyzeSentimentOptions)}.
* Enables opinion mining and prints, per sentence, each mined aspect and its opinions.
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
// Mined opinions are available because setIncludeOpinionMining(true) was passed above.
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}.
* Batch sentiment analysis with opinion mining; prints per-sentence aspects and opinions.
* (Duplicated empty javadoc stub above this method collapsed into this single comment.)
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
* Batch sentiment analysis over {@link TextDocumentInput} with statistics requested.
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
// Statistics below are read only because setIncludeStatistics(true) is requested here.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
This is final, does it mean it is always null on this model class? | public SentenceSentiment(String text, TextSentiment sentiment, SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = null;
this.confidenceScores = confidenceScores;
} | this.minedOpinions = null; | public SentenceSentiment(String text, TextSentiment sentiment, SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = null;
this.confidenceScores = confidenceScores;
} | class SentenceSentiment {
private final String text;
private final SentimentConfidenceScores confidenceScores;
private final TextSentiment sentiment;
private final IterableStream<MinedOpinions> minedOpinions;
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param minedOpinions The mined opinions of the sentence sentiment. This is only returned if you pass the
* opinion mining parameter to the analyze sentiment APIs.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
public SentenceSentiment(String text, TextSentiment sentiment, IterableStream<MinedOpinions> minedOpinions,
SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = minedOpinions;
this.confidenceScores = confidenceScores;
}
/**
* Get the sentence text property.
*
* @return the text property value.
*/
public String getText() {
return this.text;
}
/**
* Get the text sentiment label: POSITIVE, NEGATIVE, or NEUTRAL.
*
* @return The {@link TextSentiment}.
*/
public TextSentiment getSentiment() {
return sentiment;
}
/**
* Get the mined opinions of sentence sentiment.
* This is only returned if you pass the opinion mining parameter to the analyze sentiment APIs.
*
* @return The mined opinions of sentence sentiment.
*/
public IterableStream<MinedOpinions> getMinedOpinions() {
return minedOpinions;
}
/**
* Get the confidence score of the sentiment label. All score values sum up to 1, the higher the score, the
* higher the confidence in the sentiment.
*
* @return The {@link SentimentConfidenceScores}.
*/
public SentimentConfidenceScores getConfidenceScores() {
return confidenceScores;
}
} | class SentenceSentiment {
private final String text;
private final SentimentConfidenceScores confidenceScores;
private final TextSentiment sentiment;
private final IterableStream<MinedOpinion> minedOpinions;
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param minedOpinions The mined opinions of the sentence sentiment. This is only returned if you pass the
* opinion mining parameter to the analyze sentiment APIs.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
public SentenceSentiment(String text, TextSentiment sentiment, IterableStream<MinedOpinion> minedOpinions,
SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = minedOpinions;
this.confidenceScores = confidenceScores;
}
/**
* Get the sentence text property.
*
* @return the text property value.
*/
public String getText() {
return this.text;
}
/**
* Get the text sentiment label: POSITIVE, NEGATIVE, or NEUTRAL.
*
* @return The {@link TextSentiment}.
*/
public TextSentiment getSentiment() {
return sentiment;
}
/**
* Get the mined opinions of sentence sentiment.
* This is only returned if you pass the opinion mining parameter to the analyze sentiment APIs.
*
* @return The mined opinions of sentence sentiment.
*/
public IterableStream<MinedOpinion> getMinedOpinions() {
return minedOpinions;
}
/**
* Get the confidence score of the sentiment label. All score values sum up to 1, the higher the score, the
* higher the confidence in the sentiment.
*
* @return The {@link SentimentConfidenceScores}.
*/
public SentimentConfidenceScores getConfidenceScores() {
return confidenceScores;
}
} |
This happens in other places of your tests... I see how you always replace null for TextAnalyticsRequestOptions . if you leave it null, will the code compile? I am asking this because I want to make sure this is not because of the addition of AnalyzeSentimentOptions | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", new TextAnalyticsRequestOptions()).subscribe( | public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en",
new TextAnalyticsRequestOptions().setIncludeStatistics(true)).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the aspect negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntities() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesWithLanguage() {
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguageWithOpinionMining() {
textAnalyticsAsyncClient.analyzeSentiment("The hotel was dark and unclean.", "en",
new AnalyzeSentimentOptions().setIncludeOpinionMining(true))
.subscribe(documentSentiment -> {
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf("\t\t'%s' sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptionsAndOpinionMining() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions().setIncludeOpinionMining(true);
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", options).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverloadWithOpinionMining() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
AnalyzeSentimentOptions options = new AnalyzeSentimentOptions()
.setIncludeOpinionMining(true).setIncludeStatistics(true);
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, options)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
documentSentiment.getSentences().forEach(sentenceSentiment -> {
System.out.printf("\tSentence sentiment: %s%n", sentenceSentiment.getSentiment());
sentenceSentiment.getMinedOpinions().forEach(minedOpinions -> {
AspectSentiment aspectSentiment = minedOpinions.getAspect();
System.out.printf("\t\tAspect sentiment: %s, aspect text: %s%n",
aspectSentiment.getSentiment(), aspectSentiment.getText());
for (OpinionSentiment opinionSentiment : minedOpinions.getOpinions()) {
System.out.printf(
"\t\t\t'%s' opinion sentiment because of \"%s\". Is the opinion negated: %s.%n",
opinionSentiment.getSentiment(), opinionSentiment.getText(),
opinionSentiment.isNegated());
}
});
});
});
});
}
} |
If the user uses this constructor, that means the minedOpinion will always be null. A null minedOpinion means the user doesn't want to include opinion mining in the request. An empty list of minedOpinion has a different meaning: it means the user wants opinion mining but no opinions were returned, so it is an empty list of opinions. | public SentenceSentiment(String text, TextSentiment sentiment, SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = null;
this.confidenceScores = confidenceScores;
} | this.minedOpinions = null; | public SentenceSentiment(String text, TextSentiment sentiment, SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = null;
this.confidenceScores = confidenceScores;
} | class SentenceSentiment {
private final String text;
private final SentimentConfidenceScores confidenceScores;
private final TextSentiment sentiment;
private final IterableStream<MinedOpinions> minedOpinions;
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param minedOpinions The mined opinions of the sentence sentiment. This is only returned if you pass the
* opinion mining parameter to the analyze sentiment APIs.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
public SentenceSentiment(String text, TextSentiment sentiment, IterableStream<MinedOpinions> minedOpinions,
SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = minedOpinions;
this.confidenceScores = confidenceScores;
}
/**
* Get the sentence text property.
*
* @return the text property value.
*/
public String getText() {
return this.text;
}
/**
* Get the text sentiment label: POSITIVE, NEGATIVE, or NEUTRAL.
*
* @return The {@link TextSentiment}.
*/
public TextSentiment getSentiment() {
return sentiment;
}
/**
* Get the mined opinions of sentence sentiment.
* This is only returned if you pass the opinion mining parameter to the analyze sentiment APIs.
*
* @return The mined opinions of sentence sentiment.
*/
public IterableStream<MinedOpinions> getMinedOpinions() {
return minedOpinions;
}
/**
* Get the confidence score of the sentiment label. All score values sum up to 1, the higher the score, the
* higher the confidence in the sentiment.
*
* @return The {@link SentimentConfidenceScores}.
*/
public SentimentConfidenceScores getConfidenceScores() {
return confidenceScores;
}
} | class SentenceSentiment {
private final String text;
private final SentimentConfidenceScores confidenceScores;
private final TextSentiment sentiment;
private final IterableStream<MinedOpinion> minedOpinions;
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
/**
* Creates a {@link SentenceSentiment} model that describes the sentiment analysis of sentence.
* @param text The sentence text.
* @param sentiment The sentiment label of the sentence.
* @param minedOpinions The mined opinions of the sentence sentiment. This is only returned if you pass the
* opinion mining parameter to the analyze sentiment APIs.
* @param confidenceScores The sentiment confidence score (Softmax score) between 0 and 1, for each sentiment label.
* Higher values signify higher confidence.
*/
public SentenceSentiment(String text, TextSentiment sentiment, IterableStream<MinedOpinion> minedOpinions,
SentimentConfidenceScores confidenceScores) {
this.text = text;
this.sentiment = sentiment;
this.minedOpinions = minedOpinions;
this.confidenceScores = confidenceScores;
}
/**
* Get the sentence text property.
*
* @return the text property value.
*/
public String getText() {
return this.text;
}
/**
* Get the text sentiment label: POSITIVE, NEGATIVE, or NEUTRAL.
*
* @return The {@link TextSentiment}.
*/
public TextSentiment getSentiment() {
return sentiment;
}
/**
* Get the mined opinions of sentence sentiment.
* This is only returned if you pass the opinion mining parameter to the analyze sentiment APIs.
*
* @return The mined opinions of sentence sentiment.
*/
public IterableStream<MinedOpinion> getMinedOpinions() {
return minedOpinions;
}
/**
* Get the confidence score of the sentiment label. All score values sum up to 1, the higher the score, the
* higher the confidence in the sentiment.
*
* @return The {@link SentimentConfidenceScores}.
*/
public SentimentConfidenceScores getConfidenceScores() {
return confidenceScores;
}
} |
Redundant escape characters, seems like this should work? >final String patternRegex = "#/documents/(\\d+)/sentences/(\\d+)/opinions/(\\d+)"; | int[] parseRefPointerToIndexArray(String opinionPointer) {
final String patternRegex = "
final Pattern pattern = Pattern.compile(patternRegex);
final Matcher matcher = pattern.matcher(opinionPointer);
final boolean isMatched = matcher.find();
final int[] result = new int[3];
if (isMatched) {
String[] segments = opinionPointer.split("/");
result[0] = Integer.parseInt(segments[2]);
result[1] = Integer.parseInt(segments[4]);
result[2] = Integer.parseInt(segments[6]);
} else {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("'%s' is not a valid opinion pointer.", opinionPointer)));
}
return result;
} | final String patternRegex = " | int[] parseRefPointerToIndexArray(String opinionPointer) {
final String patternRegex = "
final Pattern pattern = Pattern.compile(patternRegex);
final Matcher matcher = pattern.matcher(opinionPointer);
final boolean isMatched = matcher.find();
final int[] result = new int[3];
if (isMatched) {
String[] segments = opinionPointer.split("/");
result[0] = Integer.parseInt(segments[2]);
result[1] = Integer.parseInt(segments[4]);
result[2] = Integer.parseInt(segments[6]);
} else {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("'%s' is not a valid opinion pointer.", opinionPointer)));
}
return result;
} | class AnalyzeSentimentAsyncClient {
private static final int NEUTRAL_SCORE_ZERO = 0;
private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create an {@link AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment
* analysis endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
/**
* Helper function for calling service with max overloaded parameters that returns a mono {@link Response}
* which contains {@link AnalyzeSentimentResultCollection}.
*
* @param documents The list of documents to analyze sentiments for.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
public Mono<Response<AnalyzeSentimentResultCollection>> analyzeSentimentBatch(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options) {
try {
inputDocumentsValidation(documents);
return withContext(context -> getAnalyzedSentimentResponse(documents, options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Helper function for calling service with max overloaded parameters that returns a mono {@link Response}
* which contains {@link AnalyzeSentimentResultCollection}.
*
* @param documents The list of documents to analyze sentiments for.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
Mono<Response<AnalyzeSentimentResultCollection>> analyzeSentimentBatchWithContext(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options, Context context) {
try {
inputDocumentsValidation(documents);
return getAnalyzedSentimentResponse(documents, options, context);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Helper method to convert the service response of {@link SentimentResponse} to {@link Response} that contains
* {@link AnalyzeSentimentResultCollection}.
*
* @param response The {@link Response} of {@link SentimentResponse} returned by the service.
*
* @return A {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
private Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse(
Response<SentimentResponse> response) {
final SentimentResponse sentimentResponse = response.getValue();
final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
final List<DocumentSentiment> documentSentiments = sentimentResponse.getDocuments();
for (DocumentSentiment documentSentiment : documentSentiments) {
analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment, documentSentiments));
}
for (DocumentError documentError : sentimentResponse.getErrors()) {
analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
toTextAnalyticsError(documentError.getError()), null));
}
return new SimpleResponse<>(response,
new AnalyzeSentimentResultCollection(analyzeSentimentResults, sentimentResponse.getModelVersion(),
sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())));
}
/**
* Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}.
*
* @param documentSentiment The {@link DocumentSentiment} returned by the service.
* @param documentSentimentList The document sentiment list returned by the service.
*
* @return The {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(DocumentSentiment documentSentiment,
List<DocumentSentiment> documentSentimentList) {
final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores();
final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream()
.map(sentenceSentiment -> {
final SentimentConfidenceScorePerLabel confidenceScorePerSentence =
sentenceSentiment.getConfidenceScores();
final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment();
return new SentenceSentiment(sentenceSentiment.getText(),
TextSentiment.fromString(sentenceSentimentValue == null ? null : sentenceSentimentValue.toString()),
toMinedOpinionList(sentenceSentiment, documentSentimentList),
new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(),
confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())
);
}).collect(Collectors.toList());
final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map(
warning -> {
final WarningCodeValue warningCodeValue = warning.getCode();
return new TextAnalyticsWarning(
WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()),
warning.getMessage());
}).collect(Collectors.toList());
final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment();
return new AnalyzeSentimentResult(
documentSentiment.getId(),
documentSentiment.getStatistics() == null
? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
null,
new com.azure.ai.textanalytics.models.DocumentSentiment(
TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()),
new SentimentConfidenceScores(
confidenceScorePerLabel.getNegative(),
confidenceScorePerLabel.getNeutral(),
confidenceScorePerLabel.getPositive()),
new IterableStream<>(sentenceSentiments),
new IterableStream<>(warnings)));
}
/**
* Call the service with REST response, convert to a {@link Mono} of {@link Response} which contains
* {@link AnalyzeSentimentResultCollection} from a {@link SimpleResponse} of {@link SentimentResponse}.
*
* @param documents A list of documents to be analyzed.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
private Mono<Response<AnalyzeSentimentResultCollection>> getAnalyzedSentimentResponse(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options, Context context) {
return service.sentimentWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
options == null ? null : options.isIncludeOpinionMining(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Analyzed sentiment for a batch of documents - {}", response))
.doOnError(error -> logger.warning("Failed to analyze sentiment - {}", error))
.map(this::toAnalyzeSentimentResultCollectionResponse)
.onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
}
/*
* Transform SentenceSentiment's opinion mining to output that user can use.
*/
private IterableStream<MinedOpinion> toMinedOpinionList(
com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment,
List<DocumentSentiment> documentSentimentList) {
final List<SentenceAspect> sentenceAspects = sentenceSentiment.getAspects();
if (sentenceAspects == null) {
return null;
}
final List<MinedOpinion> minedOpinions = new ArrayList<>();
sentenceAspects.forEach(sentenceAspect -> {
final List<OpinionSentiment> opinionSentiments = new ArrayList<>();
sentenceAspect.getRelations().forEach(aspectRelation -> {
final AspectRelationType aspectRelationType = aspectRelation.getRelationType();
final String opinionPointer = aspectRelation.getRef();
if (AspectRelationType.OPINION == aspectRelationType) {
opinionSentiments.add(toOpinionSentiment(
findSentimentOpinion(opinionPointer, documentSentimentList)));
}
});
minedOpinions.add(new MinedOpinion(
new AspectSentiment(sentenceAspect.getText(),
TextSentiment.fromString(sentenceAspect.getSentiment().toString()),
sentenceAspect.getOffset(), sentenceAspect.getLength(),
toSentimentConfidenceScores(sentenceAspect.getConfidenceScores())),
new IterableStream<>(opinionSentiments)));
});
return new IterableStream<>(minedOpinions);
}
/*
* Transform type AspectConfidenceScoreLabel to SentimentConfidenceScores.
*/
private SentimentConfidenceScores toSentimentConfidenceScores(
AspectConfidenceScoreLabel aspectConfidenceScoreLabel) {
return new SentimentConfidenceScores(aspectConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO,
aspectConfidenceScoreLabel.getPositive());
}
/*
* Transform type SentenceOpinion to OpinionSentiment.
*/
private OpinionSentiment toOpinionSentiment(SentenceOpinion sentenceOpinion) {
return new OpinionSentiment(sentenceOpinion.getText(),
TextSentiment.fromString(sentenceOpinion.getSentiment().toString()),
sentenceOpinion.getOffset(), sentenceOpinion.getLength(), sentenceOpinion.isNegated(),
toSentimentConfidenceScores(sentenceOpinion.getConfidenceScores()));
}
/*
* Parses the reference pointer to an index array that contains document, sentence, and opinion indexes.
*/
/*
* Find the specific sentence opinion in the document sentiment list by given the opinion reference pointer.
*/
SentenceOpinion findSentimentOpinion(String opinionPointer, List<DocumentSentiment> documentSentiments) {
final int[] opinionIndexes = parseRefPointerToIndexArray(opinionPointer);
final int documentIndex = opinionIndexes[0];
final int sentenceIndex = opinionIndexes[1];
final int opinionIndex = opinionIndexes[2];
if (documentIndex >= documentSentiments.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid document index '%s' in '%s'.", documentIndex, opinionPointer)));
}
final DocumentSentiment documentsentiment = documentSentiments.get(documentIndex);
final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments =
documentsentiment.getSentences();
if (sentenceIndex >= sentenceSentiments.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, opinionPointer)));
}
final List<SentenceOpinion> opinions = sentenceSentiments.get(sentenceIndex).getOpinions();
if (opinionIndex >= opinions.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid opinion index '%s' in '%s'.", opinionIndex, opinionPointer)));
}
return opinions.get(opinionIndex);
}
} | class AnalyzeSentimentAsyncClient {
private static final int NEUTRAL_SCORE_ZERO = 0;
private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create an {@link AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment
* analysis endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
this.service = service;
}
/**
* Helper function for calling service with max overloaded parameters that returns a mono {@link Response}
* which contains {@link AnalyzeSentimentResultCollection}.
*
* @param documents The list of documents to analyze sentiments for.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
public Mono<Response<AnalyzeSentimentResultCollection>> analyzeSentimentBatch(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options) {
try {
inputDocumentsValidation(documents);
return withContext(context -> getAnalyzedSentimentResponse(documents, options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Helper function for calling service with max overloaded parameters that returns a mono {@link Response}
* which contains {@link AnalyzeSentimentResultCollection}.
*
* @param documents The list of documents to analyze sentiments for.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
Mono<Response<AnalyzeSentimentResultCollection>> analyzeSentimentBatchWithContext(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options, Context context) {
try {
inputDocumentsValidation(documents);
return getAnalyzedSentimentResponse(documents, options, context);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Helper method to convert the service response of {@link SentimentResponse} to {@link Response} that contains
* {@link AnalyzeSentimentResultCollection}.
*
* @param response The {@link Response} of {@link SentimentResponse} returned by the service.
*
* @return A {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
private Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse(
Response<SentimentResponse> response) {
final SentimentResponse sentimentResponse = response.getValue();
final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
final List<DocumentSentiment> documentSentiments = sentimentResponse.getDocuments();
for (DocumentSentiment documentSentiment : documentSentiments) {
analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment, documentSentiments));
}
for (DocumentError documentError : sentimentResponse.getErrors()) {
analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null,
toTextAnalyticsError(documentError.getError()), null));
}
return new SimpleResponse<>(response,
new AnalyzeSentimentResultCollection(analyzeSentimentResults, sentimentResponse.getModelVersion(),
sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())));
}
/**
* Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}.
*
* @param documentSentiment The {@link DocumentSentiment} returned by the service.
* @param documentSentimentList The document sentiment list returned by the service.
*
* @return The {@link AnalyzeSentimentResult} to be returned by the SDK.
*/
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(DocumentSentiment documentSentiment,
List<DocumentSentiment> documentSentimentList) {
final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores();
final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream()
.map(sentenceSentiment -> {
final SentimentConfidenceScorePerLabel confidenceScorePerSentence =
sentenceSentiment.getConfidenceScores();
final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment();
return new SentenceSentiment(sentenceSentiment.getText(),
TextSentiment.fromString(sentenceSentimentValue == null ? null : sentenceSentimentValue.toString()),
toMinedOpinionList(sentenceSentiment, documentSentimentList),
new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(),
confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())
);
}).collect(Collectors.toList());
final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map(
warning -> {
final WarningCodeValue warningCodeValue = warning.getCode();
return new TextAnalyticsWarning(
WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()),
warning.getMessage());
}).collect(Collectors.toList());
final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment();
return new AnalyzeSentimentResult(
documentSentiment.getId(),
documentSentiment.getStatistics() == null
? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
null,
new com.azure.ai.textanalytics.models.DocumentSentiment(
TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()),
new SentimentConfidenceScores(
confidenceScorePerLabel.getNegative(),
confidenceScorePerLabel.getNeutral(),
confidenceScorePerLabel.getPositive()),
new IterableStream<>(sentenceSentiments),
new IterableStream<>(warnings)));
}
/**
* Call the service with REST response, convert to a {@link Mono} of {@link Response} which contains
* {@link AnalyzeSentimentResultCollection} from a {@link SimpleResponse} of {@link SentimentResponse}.
*
* @param documents A list of documents to be analyzed.
* @param options The additional configurable {@link AnalyzeSentimentOptions options} that may be passed when
* analyzing sentiments.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} contains {@link AnalyzeSentimentResultCollection}.
*/
private Mono<Response<AnalyzeSentimentResultCollection>> getAnalyzedSentimentResponse(
Iterable<TextDocumentInput> documents, AnalyzeSentimentOptions options, Context context) {
return service.sentimentWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
options == null ? null : options.isIncludeOpinionMining(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Analyzed sentiment for a batch of documents - {}", response))
.doOnError(error -> logger.warning("Failed to analyze sentiment - {}", error))
.map(this::toAnalyzeSentimentResultCollectionResponse)
.onErrorMap(Utility::mapToHttpResponseExceptionIfExist);
}
/*
* Transform SentenceSentiment's opinion mining to output that user can use.
*/
private IterableStream<MinedOpinion> toMinedOpinionList(
com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment,
List<DocumentSentiment> documentSentimentList) {
final List<SentenceAspect> sentenceAspects = sentenceSentiment.getAspects();
if (sentenceAspects == null) {
return null;
}
final List<MinedOpinion> minedOpinions = new ArrayList<>();
sentenceAspects.forEach(sentenceAspect -> {
final List<OpinionSentiment> opinionSentiments = new ArrayList<>();
sentenceAspect.getRelations().forEach(aspectRelation -> {
final AspectRelationType aspectRelationType = aspectRelation.getRelationType();
final String opinionPointer = aspectRelation.getRef();
if (AspectRelationType.OPINION == aspectRelationType) {
opinionSentiments.add(toOpinionSentiment(
findSentimentOpinion(opinionPointer, documentSentimentList)));
}
});
minedOpinions.add(new MinedOpinion(
new AspectSentiment(sentenceAspect.getText(),
TextSentiment.fromString(sentenceAspect.getSentiment().toString()),
sentenceAspect.getOffset(), sentenceAspect.getLength(),
toSentimentConfidenceScores(sentenceAspect.getConfidenceScores())),
new IterableStream<>(opinionSentiments)));
});
return new IterableStream<>(minedOpinions);
}
/*
* Transform type AspectConfidenceScoreLabel to SentimentConfidenceScores.
*/
private SentimentConfidenceScores toSentimentConfidenceScores(
AspectConfidenceScoreLabel aspectConfidenceScoreLabel) {
return new SentimentConfidenceScores(aspectConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO,
aspectConfidenceScoreLabel.getPositive());
}
/*
* Transform type SentenceOpinion to OpinionSentiment.
*/
private OpinionSentiment toOpinionSentiment(SentenceOpinion sentenceOpinion) {
return new OpinionSentiment(sentenceOpinion.getText(),
TextSentiment.fromString(sentenceOpinion.getSentiment().toString()),
sentenceOpinion.getOffset(), sentenceOpinion.getLength(), sentenceOpinion.isNegated(),
toSentimentConfidenceScores(sentenceOpinion.getConfidenceScores()));
}
/*
* Parses the reference pointer to an index array that contains document, sentence, and opinion indexes.
*/
/*
* Find the specific sentence opinion in the document sentiment list by given the opinion reference pointer.
*/
SentenceOpinion findSentimentOpinion(String opinionPointer, List<DocumentSentiment> documentSentiments) {
final int[] opinionIndexes = parseRefPointerToIndexArray(opinionPointer);
final int documentIndex = opinionIndexes[0];
final int sentenceIndex = opinionIndexes[1];
final int opinionIndex = opinionIndexes[2];
if (documentIndex >= documentSentiments.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid document index '%s' in '%s'.", documentIndex, opinionPointer)));
}
final DocumentSentiment documentsentiment = documentSentiments.get(documentIndex);
final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments =
documentsentiment.getSentences();
if (sentenceIndex >= sentenceSentiments.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, opinionPointer)));
}
final List<SentenceOpinion> opinions = sentenceSentiments.get(sentenceIndex).getOpinions();
if (opinionIndex >= opinions.size()) {
throw logger.logExceptionAsError(new IllegalStateException(
String.format("Invalid opinion index '%s' in '%s'.", opinionIndex, opinionPointer)));
}
return opinions.get(opinionIndex);
}
} |
Sorry to keep asking this, but it looks like we decided to allow null for options when they don't have a required value? | Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
} | options = (options == null) ? new AppendBlobSealOptions() : options; | new AppendBlobSealOptions())
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
EncryptionScope encryptionScope, String versionId) {
super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
encryptionScope, versionId);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
try {
return create(false);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
try {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions();
if (!overwrite) {
blobRequestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return createWithResponse(null, null, blobRequestConditions).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
BlobRequestConditions requestConditions) {
return this.createWithResponse(new AppendBlobCreateOptions().setHeaders(headers).setMetadata(metadata)
.setRequestConditions(requestConditions));
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
try {
return withContext(context -> createWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
options = (options == null) ? new AppendBlobCreateOptions() : options;
BlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
hd.getVersionId());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
try {
return appendBlockWithResponse(data, length, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions) {
try {
return withContext(context ->
appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
: appendBlobRequestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
try {
return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
BlobRequestConditions sourceRequestConditions) {
try {
return withContext(context ->
appendBlockFromUrlWithResponse(sourceUrl, sourceRange, sourceContentMD5,
destRequestConditions, sourceRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
RequestConditions sourceRequestConditions, Context context) {
sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
destRequestConditions = (destRequestConditions == null)
? new AppendBlobRequestConditions() : destRequestConditions;
sourceRequestConditions = (sourceRequestConditions == null)
? new RequestConditions() : sourceRequestConditions;
URL url;
try {
url = new URL(sourceUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sourceUrl' is not a valid url."));
}
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> sealWithResponse(options, ctx));
    } catch (RuntimeException ex) {
        // Surface synchronous failures through the returned Mono instead of throwing.
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the seal call with an explicit Context.
Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
// Normalize nullable inputs: callers may pass null options, conditions, or context.
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer; the tracing namespace key
// attributes the resulting spans to Azure Storage.
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Seal returns no body; keep only status/headers in the response.
.map(response -> new SimpleResponse<>(response, null));
}
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
EncryptionScope encryptionScope, String versionId) {
// All shared blob state/behavior lives in the base client; this subtype only adds
// append-blob-specific operations.
super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
encryptionScope, versionId);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
    try {
        // Default behavior: never clobber an existing blob.
        final boolean overwrite = false;
        return create(overwrite);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
    try {
        BlobRequestConditions conditions = new BlobRequestConditions();
        if (!overwrite) {
            // An if-none-match of "*" makes the service reject the create when the blob exists.
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, conditions).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions) {
    // Bundle the legacy parameter list into an options bag and reuse the options overload.
    AppendBlobCreateOptions options = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(options);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> createWithResponse(options, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the create call with an explicit Context.
Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
// Normalize nullable inputs: callers may pass null options, conditions, or context.
options = (options == null) ? new AppendBlobCreateOptions() : options;
BlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer. Content length is 0 because an
// append blob is created empty; data is added afterwards via appendBlock.
return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Project the deserialized service headers into the public AppendBlobItem model.
.map(rb -> {
AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
hd.getVersionId());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
    try {
        // No MD5 check and no access conditions; unwrap the Response to its value.
        Mono<Response<AppendBlobItem>> response = appendBlockWithResponse(data, length, null, null);
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx ->
            appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the append-block call with an explicit Context.
Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
// Normalize nullable inputs: callers may pass null conditions or context.
appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
: appendBlobRequestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer; the tracing namespace key
// attributes the resulting spans to Azure Storage.
return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Project the deserialized service headers into the public AppendBlobItem model.
.map(rb -> {
AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    try {
        // No MD5 check and no access conditions; unwrap the Response to its value.
        Mono<Response<AppendBlobItem>> response =
            appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null);
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> appendBlockFromUrlWithResponse(
            sourceUrl, sourceRange, sourceContentMD5, destRequestConditions, sourceRequestConditions, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the append-block-from-URL call with an explicit Context.
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
    RequestConditions sourceRequestConditions, Context context) {
    // Normalize nullable inputs: callers may pass null range, conditions, or context.
    sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
    destRequestConditions = (destRequestConditions == null)
        ? new AppendBlobRequestConditions() : destRequestConditions;
    sourceRequestConditions = (sourceRequestConditions == null)
        ? new RequestConditions() : sourceRequestConditions;
    URL url;
    try {
        url = new URL(sourceUrl);
    } catch (MalformedURLException ex) {
        // Preserve the parse failure as the cause so callers can see why the URL was rejected.
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'sourceUrl' is not a valid url.", ex));
    }
    context = context == null ? Context.NONE : context;
    // Delegate to the auto-generated protocol layer; the tracing namespace key
    // attributes the resulting spans to Azure Storage.
    return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
        sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
        destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
        destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
        destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
        destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
        sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
        sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
        context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
        // Project the deserialized service headers into the public AppendBlobItem model.
        .map(rb -> {
            AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
            AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
                hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
            return new SimpleResponse<>(rb, item);
        });
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> sealWithResponse(options, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the seal call with an explicit Context.
Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
// Normalize nullable inputs: callers may pass null options, conditions, or context.
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer; the tracing namespace key
// attributes the resulting spans to Azure Storage.
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Seal returns no body; keep only status/headers in the response.
.map(response -> new SimpleResponse<>(response, null));
}
} |
yeah I noticed that we do allow null for options when they dont have a required value. Im gonna create a PR after this making them all have this functionality - just didnt want to have this PR do 2 things at once | Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
} | options = (options == null) ? new AppendBlobSealOptions() : options; | new AppendBlobSealOptions())
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
EncryptionScope encryptionScope, String versionId) {
// All shared blob state/behavior lives in the base client; this subtype only adds
// append-blob-specific operations.
super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
encryptionScope, versionId);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
    try {
        // Default behavior: never clobber an existing blob.
        final boolean overwrite = false;
        return create(overwrite);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
    try {
        BlobRequestConditions conditions = new BlobRequestConditions();
        if (!overwrite) {
            // An if-none-match of "*" makes the service reject the create when the blob exists.
            conditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
        }
        return createWithResponse(null, null, conditions).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
    BlobRequestConditions requestConditions) {
    // Bundle the legacy parameter list into an options bag and reuse the options overload.
    AppendBlobCreateOptions options = new AppendBlobCreateOptions()
        .setHeaders(headers)
        .setMetadata(metadata)
        .setRequestConditions(requestConditions);
    return this.createWithResponse(options);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> createWithResponse(options, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the create call with an explicit Context.
Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
// Normalize nullable inputs: callers may pass null options, conditions, or context.
options = (options == null) ? new AppendBlobCreateOptions() : options;
BlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer. Content length is 0 because an
// append blob is created empty; data is added afterwards via appendBlock.
return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Project the deserialized service headers into the public AppendBlobItem model.
.map(rb -> {
AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
hd.getVersionId());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
    try {
        // No MD5 check and no access conditions; unwrap the Response to its value.
        Mono<Response<AppendBlobItem>> response = appendBlockWithResponse(data, length, null, null);
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
    AppendBlobRequestConditions appendBlobRequestConditions) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx ->
            appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the append-block call with an explicit Context.
Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
// Normalize nullable inputs: callers may pass null conditions or context.
appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
: appendBlobRequestConditions;
context = context == null ? Context.NONE : context;
// Delegate to the auto-generated protocol layer; the tracing namespace key
// attributes the resulting spans to Azure Storage.
return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
// Project the deserialized service headers into the public AppendBlobItem model.
.map(rb -> {
AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
    try {
        // No MD5 check and no access conditions; unwrap the Response to its value.
        Mono<Response<AppendBlobItem>> response =
            appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null);
        return response.flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
    BlobRequestConditions sourceRequestConditions) {
    try {
        // Capture the subscriber's reactor Context and hand off to the service-call overload.
        return withContext(ctx -> appendBlockFromUrlWithResponse(
            sourceUrl, sourceRange, sourceContentMD5, destRequestConditions, sourceRequestConditions, ctx));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Package-private overload that performs the append-block-from-URL call with an explicit Context.
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
    byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
    RequestConditions sourceRequestConditions, Context context) {
    // Normalize nullable inputs: callers may pass null range, conditions, or context.
    sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
    destRequestConditions = (destRequestConditions == null)
        ? new AppendBlobRequestConditions() : destRequestConditions;
    sourceRequestConditions = (sourceRequestConditions == null)
        ? new RequestConditions() : sourceRequestConditions;
    URL url;
    try {
        url = new URL(sourceUrl);
    } catch (MalformedURLException ex) {
        // Preserve the parse failure as the cause so callers can see why the URL was rejected.
        throw logger.logExceptionAsError(
            new IllegalArgumentException("'sourceUrl' is not a valid url.", ex));
    }
    context = context == null ? Context.NONE : context;
    // Delegate to the auto-generated protocol layer; the tracing namespace key
    // attributes the resulting spans to Azure Storage.
    return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
        sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
        destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
        destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
        destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
        destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
        sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
        sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
        context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
        // Project the deserialized service headers into the public AppendBlobItem model.
        .map(rb -> {
            AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
            AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
                hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
            return new SimpleResponse<>(rb, item);
        });
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
try {
return withContext(context -> sealWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Seals the append blob on the service, honoring the request conditions carried by {@code options}.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param options {@link AppendBlobSealOptions}; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the service response with no value payload.
     */
    Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
        // Normalize null arguments so the generated client call below never receives nulls it cannot handle.
        options = (options == null) ? new AppendBlobSealOptions() : options;
        AppendBlobRequestConditions requestConditions = options.getRequestConditions();
        requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
        context = context == null ? Context.NONE : context;
        // Leading nulls are optional positional parameters of the generated REST method
        // (presumably timeout/request-id style options) -- TODO confirm against the generated signature.
        return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
            requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
            requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
            requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(response -> new SimpleResponse<>(response, null));
    }
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
    AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
        String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
        EncryptionScope encryptionScope, String versionId) {
        // All shared blob state (pipeline, endpoint, CPK/encryption scope, snapshot/version) lives in the base class.
        super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
            encryptionScope, versionId);
    }
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
try {
return create(false);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
try {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions();
if (!overwrite) {
blobRequestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return createWithResponse(null, null, blobRequestConditions).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
BlobRequestConditions requestConditions) {
return this.createWithResponse(new AppendBlobCreateOptions().setHeaders(headers).setMetadata(metadata)
.setRequestConditions(requestConditions));
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
try {
return withContext(context -> createWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Creates the 0-length append blob on the service, applying headers, metadata, tags and request conditions.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param options {@link AppendBlobCreateOptions}; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the response whose value describes the created blob.
     */
    Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
        // Normalize null arguments before the generated-client call.
        options = (options == null) ? new AppendBlobCreateOptions() : options;
        BlobRequestConditions requestConditions = options.getRequestConditions();
        requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
        context = context == null ? Context.NONE : context;
        // Positional nulls/0 are optional parameters of the generated REST method
        // (presumably container/blob overrides, timeout, request id) -- TODO confirm against the generated signature.
        return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
            options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
            requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
            requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
            tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(rb -> {
                // Translate generated response headers into the public AppendBlobItem model.
                AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
                AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                    hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
                    hd.getVersionId());
                return new SimpleResponse<>(rb, item);
            });
    }
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
try {
return appendBlockWithResponse(data, length, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions) {
try {
return withContext(context ->
appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Appends {@code data} to the blob, enforcing the supplied append-blob conditions.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param data replayable data source; the service receives exactly {@code length} bytes.
     * @param length exact byte count emitted by {@code data}.
     * @param contentMd5 optional transport-integrity hash; {@code null} skips verification.
     * @param appendBlobRequestConditions conditions; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the response whose value describes the append result.
     */
    Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
        AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
        // Normalize null arguments before the generated-client call.
        appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
            : appendBlobRequestConditions;
        context = context == null ? Context.NONE : context;
        // Positional nulls are optional parameters of the generated REST method -- TODO confirm
        // against the generated signature.
        return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
            null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
            appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
            appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
            appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
            appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(rb -> {
                // Translate generated response headers into the public AppendBlobItem model.
                AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
                AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                    hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
                    hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
                return new SimpleResponse<>(rb, item);
            });
    }
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
try {
return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
BlobRequestConditions sourceRequestConditions) {
try {
return withContext(context ->
appendBlockFromUrlWithResponse(sourceUrl, sourceRange, sourceContentMD5,
destRequestConditions, sourceRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
RequestConditions sourceRequestConditions, Context context) {
sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
destRequestConditions = (destRequestConditions == null)
? new AppendBlobRequestConditions() : destRequestConditions;
sourceRequestConditions = (sourceRequestConditions == null)
? new RequestConditions() : sourceRequestConditions;
URL url;
try {
url = new URL(sourceUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sourceUrl' is not a valid url."));
}
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
try {
return withContext(context -> sealWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Seals the append blob on the service, honoring the request conditions carried by {@code options}.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param options {@link AppendBlobSealOptions}; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the service response with no value payload.
     */
    Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
        // Normalize null arguments so the generated client call below never receives nulls it cannot handle.
        options = (options == null) ? new AppendBlobSealOptions() : options;
        AppendBlobRequestConditions requestConditions = options.getRequestConditions();
        requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
        context = context == null ? Context.NONE : context;
        // Leading nulls are optional positional parameters of the generated REST method -- TODO confirm
        // against the generated signature.
        return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
            requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
            requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
            requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(response -> new SimpleResponse<>(response, null));
    }
} |
https://github.com/Azure/azure-sdk-for-java/pull/13339/ | Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
} | options = (options == null) ? new AppendBlobSealOptions() : options; | new AppendBlobSealOptions())
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
EncryptionScope encryptionScope, String versionId) {
super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
encryptionScope, versionId);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
try {
return create(false);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
try {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions();
if (!overwrite) {
blobRequestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return createWithResponse(null, null, blobRequestConditions).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
BlobRequestConditions requestConditions) {
return this.createWithResponse(new AppendBlobCreateOptions().setHeaders(headers).setMetadata(metadata)
.setRequestConditions(requestConditions));
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
try {
return withContext(context -> createWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Creates the 0-length append blob on the service, applying headers, metadata, tags and request conditions.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param options {@link AppendBlobCreateOptions}; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the response whose value describes the created blob.
     */
    Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
        // Normalize null arguments before the generated-client call.
        options = (options == null) ? new AppendBlobCreateOptions() : options;
        BlobRequestConditions requestConditions = options.getRequestConditions();
        requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
        context = context == null ? Context.NONE : context;
        // Positional nulls/0 are optional parameters of the generated REST method -- TODO confirm
        // against the generated signature.
        return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
            options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
            requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
            requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
            tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(rb -> {
                // Translate generated response headers into the public AppendBlobItem model.
                AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
                AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                    hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
                    hd.getVersionId());
                return new SimpleResponse<>(rb, item);
            });
    }
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
try {
return appendBlockWithResponse(data, length, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions) {
try {
return withContext(context ->
appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    /**
     * Appends {@code data} to the blob, enforcing the supplied append-blob conditions.
     * <p>
     * Package-private worker invoked by the public overloads once a {@link Context} is available.
     *
     * @param data replayable data source; the service receives exactly {@code length} bytes.
     * @param length exact byte count emitted by {@code data}.
     * @param contentMd5 optional transport-integrity hash; {@code null} skips verification.
     * @param appendBlobRequestConditions conditions; defaults are substituted when {@code null}.
     * @param context pipeline context; {@link Context#NONE} is substituted when {@code null}.
     * @return a {@link Mono} emitting the response whose value describes the append result.
     */
    Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
        AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
        // Normalize null arguments before the generated-client call.
        appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
            : appendBlobRequestConditions;
        context = context == null ? Context.NONE : context;
        // Positional nulls are optional parameters of the generated REST method -- TODO confirm
        // against the generated signature.
        return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
            null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
            appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
            appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
            appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
            appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
            context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
            .map(rb -> {
                // Translate generated response headers into the public AppendBlobItem model.
                AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
                AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
                    hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
                    hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
                return new SimpleResponse<>(rb, item);
            });
    }
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
try {
return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
BlobRequestConditions sourceRequestConditions) {
try {
return withContext(context ->
appendBlockFromUrlWithResponse(sourceUrl, sourceRange, sourceContentMD5,
destRequestConditions, sourceRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
RequestConditions sourceRequestConditions, Context context) {
sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
destRequestConditions = (destRequestConditions == null)
? new AppendBlobRequestConditions() : destRequestConditions;
sourceRequestConditions = (sourceRequestConditions == null)
? new RequestConditions() : sourceRequestConditions;
URL url;
try {
url = new URL(sourceUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sourceUrl' is not a valid url."));
}
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
try {
return withContext(context -> sealWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
}
} | class AppendBlobAsyncClient extends BlobAsyncClientBase {
private final ClientLogger logger = new ClientLogger(AppendBlobAsyncClient.class);
/**
* Indicates the maximum number of bytes that can be sent in a call to appendBlock.
*/
public static final int MAX_APPEND_BLOCK_BYTES = 4 * Constants.MB;
/**
* Indicates the maximum number of blocks allowed in an append blob.
*/
public static final int MAX_BLOCKS = 50000;
/**
* Package-private constructor for use by {@link SpecializedBlobClientBuilder}.
*
* @param pipeline The pipeline used to send and receive service requests.
* @param url The endpoint where to send service requests.
* @param serviceVersion The version of the service to receive requests.
* @param accountName The storage account name.
* @param containerName The container name.
* @param blobName The blob name.
* @param snapshot The snapshot identifier for the blob, pass {@code null} to interact with the blob directly.
* @param customerProvidedKey Customer provided key used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param encryptionScope Encryption scope used during encryption of the blob's data on the server, pass
* {@code null} to allow the service to use its own encryption.
* @param versionId The version identifier for the blob, pass {@code null} to interact with the latest blob version.
*/
AppendBlobAsyncClient(HttpPipeline pipeline, String url, BlobServiceVersion serviceVersion,
String accountName, String containerName, String blobName, String snapshot, CpkInfo customerProvidedKey,
EncryptionScope encryptionScope, String versionId) {
super(pipeline, url, serviceVersion, accountName, containerName, blobName, snapshot, customerProvidedKey,
encryptionScope, versionId);
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob. By default this method will
* not overwrite an existing blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create}
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create() {
try {
return create(false);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.create
*
* @param overwrite Whether or not to overwrite, should data exist on the blob.
*
* @return A {@link Mono} containing the information of the created appended blob.
*/
public Mono<AppendBlobItem> create(boolean overwrite) {
try {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions();
if (!overwrite) {
blobRequestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
}
return createWithResponse(null, null, blobRequestConditions).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param headers {@link BlobHttpHeaders}
* @param metadata Metadata to associate with the blob.
* @param requestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(BlobHttpHeaders headers, Map<String, String> metadata,
BlobRequestConditions requestConditions) {
return this.createWithResponse(new AppendBlobCreateOptions().setHeaders(headers).setMetadata(metadata)
.setRequestConditions(requestConditions));
}
/**
* Creates a 0-length append blob. Call appendBlock to append data to an append blob.
* <p>
* To avoid overwriting, pass "*" to {@link BlobRequestConditions
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.createWithResponse
*
* @param options {@link AppendBlobCreateOptions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* appended blob.
*/
public Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options) {
try {
return withContext(context -> createWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> createWithResponse(AppendBlobCreateOptions options, Context context) {
options = (options == null) ? new AppendBlobCreateOptions() : options;
BlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new BlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().createWithRestResponseAsync(null, null, 0, null,
options.getMetadata(), requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getTagsConditions(), null,
tagsToString(options.getTags()), options.getHeaders(), getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobCreateHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(), null, null,
hd.getVersionId());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlock
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlock(Flux<ByteBuffer> data, long length) {
try {
return appendBlockWithResponse(data, length, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data to the end of the existing append blob.
* <p>
* Note that the data passed must be replayable if retries are enabled (the default). In other words, the
* {@code Flux} must produce the same data each time it is subscribed to.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockWithResponse
*
* @param data The data to write to the blob. Note that this {@code Flux} must be replayable if retries are enabled
* (the default). In other words, the Flux must produce the same data each time it is subscribed to.
* @param length The exact length of the data. It is important that this value match precisely the length of the
* data emitted by the {@code Flux}.
* @param contentMd5 An MD5 hash of the block content. This hash is used to verify the integrity of the block during
* transport. When this header is specified, the storage service compares the hash of the content that has arrived
* with this header value. Note that this MD5 hash is not stored with the blob. If the two hashes do not match, the
* operation will fail.
* @param appendBlobRequestConditions {@link AppendBlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions) {
try {
return withContext(context ->
appendBlockWithResponse(data, length, contentMd5, appendBlobRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockWithResponse(Flux<ByteBuffer> data, long length, byte[] contentMd5,
AppendBlobRequestConditions appendBlobRequestConditions, Context context) {
appendBlobRequestConditions = appendBlobRequestConditions == null ? new AppendBlobRequestConditions()
: appendBlobRequestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockWithRestResponseAsync(
null, null, data, length, null, contentMd5, null, appendBlobRequestConditions.getLeaseId(),
appendBlobRequestConditions.getMaxSize(), appendBlobRequestConditions.getAppendPosition(),
appendBlobRequestConditions.getIfModifiedSince(), appendBlobRequestConditions.getIfUnmodifiedSince(),
appendBlobRequestConditions.getIfMatch(), appendBlobRequestConditions.getIfNoneMatch(),
appendBlobRequestConditions.getTagsConditions(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrl
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange The source {@link BlobRange} to copy.
* @return {@link Mono} containing the information of the append blob operation.
*/
public Mono<AppendBlobItem> appendBlockFromUrl(String sourceUrl, BlobRange sourceRange) {
try {
return appendBlockFromUrlWithResponse(sourceUrl, sourceRange, null, null, null).flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Commits a new block of data from another blob to the end of this append blob.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.appendBlockFromUrlWithResponse
*
* @param sourceUrl The url to the blob that will be the source of the copy. A source blob in the same storage
* account can be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
* must either be public or must be authenticated via a shared access signature. If the source blob is public, no
* authentication is required to perform the operation.
* @param sourceRange {@link BlobRange}
* @param sourceContentMD5 An MD5 hash of the block content from the source blob. If specified, the service will
* calculate the MD5 of the received data and fail the request if it does not match the provided MD5.
* @param destRequestConditions {@link AppendBlobRequestConditions}
* @param sourceRequestConditions {@link BlobRequestConditions}
* @return A {@link Mono} containing {@link Response} whose {@link Response
* blob operation.
*/
public Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
BlobRequestConditions sourceRequestConditions) {
try {
return withContext(context ->
appendBlockFromUrlWithResponse(sourceUrl, sourceRange, sourceContentMD5,
destRequestConditions, sourceRequestConditions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<AppendBlobItem>> appendBlockFromUrlWithResponse(String sourceUrl, BlobRange sourceRange,
byte[] sourceContentMD5, AppendBlobRequestConditions destRequestConditions,
RequestConditions sourceRequestConditions, Context context) {
sourceRange = (sourceRange == null) ? new BlobRange(0) : sourceRange;
destRequestConditions = (destRequestConditions == null)
? new AppendBlobRequestConditions() : destRequestConditions;
sourceRequestConditions = (sourceRequestConditions == null)
? new RequestConditions() : sourceRequestConditions;
URL url;
try {
url = new URL(sourceUrl);
} catch (MalformedURLException ex) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sourceUrl' is not a valid url."));
}
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().appendBlockFromUrlWithRestResponseAsync(null, null, url, 0,
sourceRange.toString(), sourceContentMD5, null, null, null, destRequestConditions.getLeaseId(),
destRequestConditions.getMaxSize(), destRequestConditions.getAppendPosition(),
destRequestConditions.getIfModifiedSince(), destRequestConditions.getIfUnmodifiedSince(),
destRequestConditions.getIfMatch(), destRequestConditions.getIfNoneMatch(),
destRequestConditions.getTagsConditions(), sourceRequestConditions.getIfModifiedSince(),
sourceRequestConditions.getIfUnmodifiedSince(), sourceRequestConditions.getIfMatch(),
sourceRequestConditions.getIfNoneMatch(), null, getCustomerProvidedKey(), encryptionScope,
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(rb -> {
AppendBlobAppendBlockFromUrlHeaders hd = rb.getDeserializedHeaders();
AppendBlobItem item = new AppendBlobItem(hd.getETag(), hd.getLastModified(), hd.getContentMD5(),
hd.isServerEncrypted(), hd.getEncryptionKeySha256(), hd.getEncryptionScope(),
hd.getBlobAppendOffset(), hd.getBlobCommittedBlockCount());
return new SimpleResponse<>(rb, item);
});
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.seal}
*
* @return A reactive response signalling completion.
*/
public Mono<Void> seal() {
try {
return sealWithResponse(
}
/**
* Seals an append blob, making it read only. Any subsequent appends will fail.
*
* <p><strong>Code Samples</strong></p>
*
* {@codesnippet com.azure.storage.blob.specialized.AppendBlobAsyncClient.sealWithResponse
*
* @param options {@link AppendBlobSealOptions}
* @return A reactive response signalling completion.
*/
public Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options) {
try {
return withContext(context -> sealWithResponse(options, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<Void>> sealWithResponse(AppendBlobSealOptions options, Context context) {
options = (options == null) ? new AppendBlobSealOptions() : options;
AppendBlobRequestConditions requestConditions = options.getRequestConditions();
requestConditions = (requestConditions == null) ? new AppendBlobRequestConditions() : requestConditions;
context = context == null ? Context.NONE : context;
return this.azureBlobStorage.appendBlobs().sealWithRestResponseAsync(null, null, null, null,
requestConditions.getLeaseId(), requestConditions.getIfModifiedSince(),
requestConditions.getIfUnmodifiedSince(), requestConditions.getIfMatch(),
requestConditions.getIfNoneMatch(), requestConditions.getAppendPosition(),
context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE))
.map(response -> new SimpleResponse<>(response, null));
}
} |
Do the level needs to be higher like at-least WARN? | public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
Duration backoffTime;
Duration timeout;
if (!(exception instanceof RetryWithException)) {
logger.debug("Operation will NOT be retried. Current attempt {}, Exception: ", this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry());
}
RetryWithException lastRetryWithException = (RetryWithException)exception;
this.request.setLastRetryWithException(lastRetryWithException);
long remainingMilliseconds = (this.waitTimeInSeconds * 1000) - this.durationTimer.getTime();
int currentRetryAttemptCount = this.attemptCount++;
if (remainingMilliseconds <= 0) {
logger.debug("Received retrywith exception after backoff/retry. Will fail the request.",
lastRetryWithException);
return Mono.just(ShouldRetryResult.error(lastRetryWithException));
}
backoffTime = Duration.ofMillis(
Math.min(
Math.min(this.currentBackoffMilliseconds, remainingMilliseconds),
RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS));
this.currentBackoffMilliseconds *= RetryWithRetryPolicy.BACK_OFF_MULTIPLIER;
logger.debug("BackoffTime: {} ms.", backoffTime.toMillis());
long timeoutInMillSec = remainingMilliseconds - backoffTime.toMillis();
timeout = timeoutInMillSec > 0 ? Duration.ofMillis(timeoutInMillSec)
: Duration.ofMillis(RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS);
logger.info("Received RetryWithException, will retry, ", exception);
return Mono.just(ShouldRetryResult.retryAfter(backoffTime,
Quadruple.with(false, true, timeout, currentRetryAttemptCount)));
} | logger.debug("Received retrywith exception after backoff/retry. Will fail the request.", | public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
return this.retryWithRetryPolicy.shouldRetry(exception)
.flatMap((retryWithResult) -> {
if (retryWithResult.shouldRetry) {
return Mono.just(retryWithResult);
}
return this.goneRetryPolicy.shouldRetry(exception)
.flatMap((goneRetryResult) -> {
if (!goneRetryResult.shouldRetry) {
logger.debug("Operation will NOT be retried. Exception:",
exception);
this.end = Instant.now();
}
return Mono.just(goneRetryResult);
});
});
} | class GoneAndRetryWithRetryPolicy extends RetryPolicyWithDiagnostics {
private final static Logger logger = LoggerFactory.getLogger(GoneAndRetryWithRetryPolicy.class);
private final GoneRetryPolicy goneRetryPolicy;
private final RetryWithRetryPolicy retryWithRetryPolicy;
private final StopWatch durationTimer = new StopWatch();
public GoneAndRetryWithRetryPolicy(RxDocumentServiceRequest request, Integer waitTimeInSeconds) {
this.goneRetryPolicy = new GoneRetryPolicy(request, waitTimeInSeconds, durationTimer);
this.retryWithRetryPolicy = new RetryWithRetryPolicy(request, waitTimeInSeconds, this.durationTimer);
startStopWatch(this.durationTimer);
}
@Override
private void stopStopWatch(StopWatch stopwatch) {
synchronized (stopwatch) {
stopwatch.stop();
}
}
private void startStopWatch(StopWatch stopwatch) {
synchronized (stopwatch) {
stopwatch.start();
}
}
static class GoneRetryPolicy extends RetryPolicyWithDiagnostics {
private final static int DEFAULT_WAIT_TIME_IN_SECONDS = 30;
private final static int MAXIMUM_BACKOFF_TIME_IN_SECONDS = 15;
private final static int INITIAL_BACKOFF_TIME = 1;
private final static int BACK_OFF_MULTIPLIER = 2;
private final RxDocumentServiceRequest request;
private volatile int attemptCount = 1;
private volatile int attemptCountInvalidPartition = 1;
private volatile int currentBackoffSeconds = GoneRetryPolicy.INITIAL_BACKOFF_TIME;
private final StopWatch durationTimer;
private final int waitTimeInSeconds;
public final static Quadruple<Boolean, Boolean, Duration, Integer> INITIAL_ARGUMENT_VALUE_POLICY_ARG = Quadruple.with(false, false,
Duration.ofSeconds(60), 0);
public GoneRetryPolicy(
RxDocumentServiceRequest request,
Integer waitTimeInSeconds,
StopWatch durationTimer) {
checkNotNull(request, "request must not be null.");
this.request = request;
this.durationTimer = durationTimer;
this.waitTimeInSeconds = waitTimeInSeconds != null ? waitTimeInSeconds : DEFAULT_WAIT_TIME_IN_SECONDS;
}
private boolean isRetryableException(Exception exception) {
if (exception instanceof GoneException ||
exception instanceof RetryWithException ||
exception instanceof PartitionIsMigratingException ||
exception instanceof PartitionKeyRangeIsSplittingException) {
return true;
}
if (exception instanceof InvalidPartitionException) {
return this.request.getPartitionKeyRangeIdentity() == null ||
this.request.getPartitionKeyRangeIdentity().getCollectionRid() == null;
}
return false;
}
private CosmosException logAndWrapExceptionWithLastRetryWithException(Exception exception) {
RetryWithException lastRetryWithException = this.request.getLastRetryWithException();
String exceptionType;
if (exception instanceof GoneException) {
exceptionType = "GoneException";
} else if (exception instanceof PartitionKeyRangeGoneException) {
exceptionType = "PartitionKeyRangeGoneException";
} else if (exception instanceof InvalidPartitionException) {
exceptionType = "InvalidPartitionException";
} else if (exception instanceof PartitionKeyRangeIsSplittingException) {
exceptionType = "PartitionKeyRangeIsSplittingException";
} else if (exception instanceof CosmosException) {
logger.warn("Received CosmosException after backoff/retry. Will fail the request.",
exception);
return (CosmosException)exception;
} else {
throw new IllegalStateException("Invalid exception type", exception);
}
if (lastRetryWithException != null) {
logger.warn(
"Received {} after backoff/retry including at least one RetryWithException. "
+ "Will fail the request with RetryWithException. {}: {}. RetryWithException: {}",
exceptionType,
exceptionType,
exception,
lastRetryWithException);
return lastRetryWithException;
}
logger.warn(
"Received {} after backoff/retry. Will fail the request. {}",
exceptionType,
exception);
return BridgeInternal.createServiceUnavailableException(exception);
}
@Override
public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
CosmosException exceptionToThrow;
Duration backoffTime = Duration.ofSeconds(0);
Duration timeout;
boolean forceRefreshAddressCache;
if (!isRetryableException(exception)) {
logger.debug("Operation will NOT be retried. Current attempt {}, Exception: ", this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry());
} else if (exception instanceof GoneException &&
!request.isReadOnly() &&
BridgeInternal.hasSendingRequestStarted((CosmosException)exception)) {
logger.warn(
"Operation will NOT be retried. Write operations can not be retried safely when sending the request " +
"to the service because they aren't idempotent. Current attempt {}, Exception: ",
this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry(
Quadruple.with(true, true, Duration.ofMillis(0), this.attemptCount)));
}
long remainingSeconds = this.waitTimeInSeconds - this.durationTimer.getTime() / 1000;
int currentRetryAttemptCount = this.attemptCount;
if (this.attemptCount++ > 1) {
if (remainingSeconds <= 0) {
exceptionToThrow = logAndWrapExceptionWithLastRetryWithException(exception);
return Mono.just(ShouldRetryResult.error(exceptionToThrow));
}
backoffTime = Duration.ofSeconds(Math.min(Math.min(this.currentBackoffSeconds, remainingSeconds),
GoneRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_SECONDS));
this.currentBackoffSeconds *= GoneRetryPolicy.BACK_OFF_MULTIPLIER;
logger.debug("BackoffTime: {} seconds.", backoffTime.getSeconds());
}
long timeoutInMillSec = remainingSeconds*1000 - backoffTime.toMillis();
timeout = timeoutInMillSec > 0 ? Duration.ofMillis(timeoutInMillSec)
: Duration.ofSeconds(GoneRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_SECONDS);
Pair<Mono<ShouldRetryResult>, Boolean> exceptionHandlingResult = handleException(exception);
Mono<ShouldRetryResult> result = exceptionHandlingResult.getLeft();
if (result != null) {
return result;
}
forceRefreshAddressCache = exceptionHandlingResult.getRight();
return Mono.just(ShouldRetryResult.retryAfter(backoffTime,
Quadruple.with(forceRefreshAddressCache, true, timeout, currentRetryAttemptCount)));
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleException(Exception exception) {
if (exception instanceof GoneException) {
return handleGoneException((GoneException)exception);
} else if (exception instanceof PartitionIsMigratingException) {
return handlePartitionIsMigratingException((PartitionIsMigratingException)exception);
} else if (exception instanceof InvalidPartitionException) {
return handleInvalidPartitionException((InvalidPartitionException)exception);
} else if (exception instanceof PartitionKeyRangeIsSplittingException) {
return handlePartitionKeyIsSplittingException((PartitionKeyRangeIsSplittingException) exception);
}
throw new IllegalStateException("Invalid exception type", exception);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleGoneException(GoneException exception) {
logger.info("Received gone exception, will retry, {}", exception.toString());
return Pair.of(null, true);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handlePartitionIsMigratingException(PartitionIsMigratingException exception) {
logger.info("Received PartitionIsMigratingException, will retry, {}", exception.toString());
this.request.forceCollectionRoutingMapRefresh = true;
return Pair.of(null, true);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handlePartitionKeyIsSplittingException(PartitionKeyRangeIsSplittingException exception) {
this.request.requestContext.resolvedPartitionKeyRange = null;
this.request.requestContext.quorumSelectedLSN = -1;
this.request.requestContext.quorumSelectedStoreResponse = null;
logger.info("Received partition key range splitting exception, will retry, {}", exception.toString());
this.request.forcePartitionKeyRangeRefresh = true;
return Pair.of(null, false);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleInvalidPartitionException(InvalidPartitionException exception) {
this.request.requestContext.quorumSelectedLSN = -1;
this.request.requestContext.resolvedPartitionKeyRange = null;
this.request.requestContext.quorumSelectedStoreResponse = null;
this.request.requestContext.globalCommittedSelectedLSN = -1;
if (this.attemptCountInvalidPartition++ > 2) {
logger.warn("Received second InvalidPartitionException after backoff/retry. Will fail the request. {}",
exception.toString());
return Pair.of(
Mono.just(ShouldRetryResult.error(BridgeInternal.createServiceUnavailableException(exception))),
false);
}
logger.info("Received invalid collection exception, will retry, {}", exception.toString());
this.request.forceNameCacheRefresh = true;
return Pair.of(null, false);
}
}
static class RetryWithRetryPolicy extends RetryPolicyWithDiagnostics {
private final static int DEFAULT_WAIT_TIME_IN_SECONDS = 30;
private final static int MAXIMUM_BACKOFF_TIME_IN_MS = 15000;
private final static int INITIAL_BACKOFF_TIME_MS = 10;
private final static int BACK_OFF_MULTIPLIER = 2;
private final RxDocumentServiceRequest request;
private volatile int attemptCount = 1;
private volatile int currentBackoffMilliseconds = RetryWithRetryPolicy.INITIAL_BACKOFF_TIME_MS;
private final int waitTimeInSeconds;
private final StopWatch durationTimer;
public final static Quadruple<Boolean, Boolean, Duration, Integer> INITIAL_ARGUMENT_VALUE_POLICY_ARG = Quadruple.with(false, false,
Duration.ofSeconds(60), 0);
public RetryWithRetryPolicy(RxDocumentServiceRequest request,
Integer waitTimeInSeconds,
StopWatch durationTimer) {
this.request = request;
this.waitTimeInSeconds = waitTimeInSeconds != null ? waitTimeInSeconds : DEFAULT_WAIT_TIME_IN_SECONDS;
this.durationTimer = durationTimer;
}
@Override
public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
Duration backoffTime;
Duration timeout;
if (!(exception instanceof RetryWithException)) {
logger.debug("Operation will NOT be retried. Current attempt {}, Exception: ", this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry());
}
RetryWithException lastRetryWithException = (RetryWithException)exception;
this.request.setLastRetryWithException(lastRetryWithException);
long remainingMilliseconds = (this.waitTimeInSeconds * 1000) - this.durationTimer.getTime();
int currentRetryAttemptCount = this.attemptCount++;
if (remainingMilliseconds <= 0) {
logger.debug("Received retrywith exception after backoff/retry. Will fail the request.",
lastRetryWithException);
return Mono.just(ShouldRetryResult.error(lastRetryWithException));
}
backoffTime = Duration.ofMillis(
Math.min(
Math.min(this.currentBackoffMilliseconds, remainingMilliseconds),
RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS));
this.currentBackoffMilliseconds *= RetryWithRetryPolicy.BACK_OFF_MULTIPLIER;
logger.debug("BackoffTime: {} ms.", backoffTime.toMillis());
long timeoutInMillSec = remainingMilliseconds - backoffTime.toMillis();
timeout = timeoutInMillSec > 0 ? Duration.ofMillis(timeoutInMillSec)
: Duration.ofMillis(RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS);
logger.info("Received RetryWithException, will retry, ", exception);
return Mono.just(ShouldRetryResult.retryAfter(backoffTime,
Quadruple.with(false, true, timeout, currentRetryAttemptCount)));
}
}
} | class GoneAndRetryWithRetryPolicy extends RetryPolicyWithDiagnostics{
private final static Logger logger = LoggerFactory.getLogger(GoneAndRetryWithRetryPolicy.class);
private final GoneRetryPolicy goneRetryPolicy;
private final RetryWithRetryPolicy retryWithRetryPolicy;
private final Instant start;
private volatile Instant end;
private volatile RetryWithException lastRetryWithException;
public GoneAndRetryWithRetryPolicy(RxDocumentServiceRequest request, Integer waitTimeInSeconds) {
this.goneRetryPolicy = new GoneRetryPolicy(
request,
waitTimeInSeconds);
this.retryWithRetryPolicy = new RetryWithRetryPolicy(
waitTimeInSeconds);
this.start = Instant.now();
}
@Override
private Duration getElapsedTime() {
Instant endSnapshot = this.end != null ? this.end : Instant.now();
return Duration.between(this.start, endSnapshot);
}
class GoneRetryPolicy extends RetryPolicyWithDiagnostics {
private final static int DEFAULT_WAIT_TIME_IN_SECONDS = 30;
private final static int MAXIMUM_BACKOFF_TIME_IN_SECONDS = 15;
private final static int INITIAL_BACKOFF_TIME = 1;
private final static int BACK_OFF_MULTIPLIER = 2;
private final RxDocumentServiceRequest request;
private volatile int attemptCount = 1;
private volatile int attemptCountInvalidPartition = 1;
private volatile int currentBackoffSeconds = GoneRetryPolicy.INITIAL_BACKOFF_TIME;
private final int waitTimeInSeconds;
public GoneRetryPolicy(
RxDocumentServiceRequest request,
Integer waitTimeInSeconds) {
checkNotNull(request, "request must not be null.");
this.request = request;
this.waitTimeInSeconds = waitTimeInSeconds != null ? waitTimeInSeconds : DEFAULT_WAIT_TIME_IN_SECONDS;
}
private boolean isNonRetryableException(Exception exception) {
if (exception instanceof GoneException ||
exception instanceof RetryWithException ||
exception instanceof PartitionIsMigratingException ||
exception instanceof PartitionKeyRangeIsSplittingException) {
return false;
}
if (exception instanceof InvalidPartitionException) {
return this.request.getPartitionKeyRangeIdentity() != null &&
this.request.getPartitionKeyRangeIdentity().getCollectionRid() != null;
}
return true;
}
private CosmosException logAndWrapExceptionWithLastRetryWithException(Exception exception) {
String exceptionType;
if (exception instanceof GoneException) {
exceptionType = "GoneException";
} else if (exception instanceof PartitionKeyRangeGoneException) {
exceptionType = "PartitionKeyRangeGoneException";
} else if (exception instanceof InvalidPartitionException) {
exceptionType = "InvalidPartitionException";
} else if (exception instanceof PartitionKeyRangeIsSplittingException) {
exceptionType = "PartitionKeyRangeIsSplittingException";
} else if (exception instanceof CosmosException) {
logger.warn("Received CosmosException after backoff/retry. Will fail the request.",
exception);
return (CosmosException)exception;
} else {
throw new IllegalStateException("Invalid exception type", exception);
}
RetryWithException lastRetryWithExceptionSnapshot =
GoneAndRetryWithRetryPolicy.this.lastRetryWithException;
if (lastRetryWithExceptionSnapshot != null) {
logger.warn(
"Received {} after backoff/retry including at least one RetryWithException. "
+ "Will fail the request with RetryWithException. {}: {}. RetryWithException: {}",
exceptionType,
exceptionType,
exception,
lastRetryWithExceptionSnapshot);
return lastRetryWithExceptionSnapshot;
}
logger.warn(
"Received {} after backoff/retry. Will fail the request. {}",
exceptionType,
exception);
return BridgeInternal.createServiceUnavailableException(exception);
}
@Override
public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
CosmosException exceptionToThrow;
Duration backoffTime = Duration.ofSeconds(0);
Duration timeout;
boolean forceRefreshAddressCache;
if (isNonRetryableException(exception)) {
logger.debug("Operation will NOT be retried. Current attempt {}, Exception: ", this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry());
} else if (exception instanceof GoneException &&
!request.isReadOnly() &&
BridgeInternal.hasSendingRequestStarted((CosmosException)exception)) {
logger.warn(
"Operation will NOT be retried. Write operations can not be retried safely when sending the request " +
"to the service because they aren't idempotent. Current attempt {}, Exception: ",
this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry(
Quadruple.with(true, true, Duration.ofMillis(0), this.attemptCount)));
}
long remainingSeconds = this.waitTimeInSeconds -
GoneAndRetryWithRetryPolicy.this.getElapsedTime().toMillis() / 1_000L;
int currentRetryAttemptCount = this.attemptCount;
if (this.attemptCount++ > 1) {
if (remainingSeconds <= 0) {
exceptionToThrow = logAndWrapExceptionWithLastRetryWithException(exception);
return Mono.just(ShouldRetryResult.error(exceptionToThrow));
}
backoffTime = Duration.ofSeconds(Math.min(Math.min(this.currentBackoffSeconds, remainingSeconds),
GoneRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_SECONDS));
this.currentBackoffSeconds *= GoneRetryPolicy.BACK_OFF_MULTIPLIER;
logger.debug("BackoffTime: {} seconds.", backoffTime.getSeconds());
}
long timeoutInMillSec = remainingSeconds*1000 - backoffTime.toMillis();
timeout = timeoutInMillSec > 0 ? Duration.ofMillis(timeoutInMillSec)
: Duration.ofSeconds(GoneRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_SECONDS);
Pair<Mono<ShouldRetryResult>, Boolean> exceptionHandlingResult = handleException(exception);
Mono<ShouldRetryResult> result = exceptionHandlingResult.getLeft();
if (result != null) {
return result;
}
forceRefreshAddressCache = exceptionHandlingResult.getRight();
return Mono.just(ShouldRetryResult.retryAfter(backoffTime,
Quadruple.with(forceRefreshAddressCache, true, timeout, currentRetryAttemptCount)));
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleException(Exception exception) {
if (exception instanceof GoneException) {
return handleGoneException((GoneException)exception);
} else if (exception instanceof PartitionIsMigratingException) {
return handlePartitionIsMigratingException((PartitionIsMigratingException)exception);
} else if (exception instanceof InvalidPartitionException) {
return handleInvalidPartitionException((InvalidPartitionException)exception);
} else if (exception instanceof PartitionKeyRangeIsSplittingException) {
return handlePartitionKeyIsSplittingException((PartitionKeyRangeIsSplittingException) exception);
}
throw new IllegalStateException("Invalid exception type", exception);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleGoneException(GoneException exception) {
logger.info("Received gone exception, will retry, {}", exception.toString());
return Pair.of(null, true);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handlePartitionIsMigratingException(PartitionIsMigratingException exception) {
logger.info("Received PartitionIsMigratingException, will retry, {}", exception.toString());
this.request.forceCollectionRoutingMapRefresh = true;
return Pair.of(null, true);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handlePartitionKeyIsSplittingException(PartitionKeyRangeIsSplittingException exception) {
this.request.requestContext.resolvedPartitionKeyRange = null;
this.request.requestContext.quorumSelectedLSN = -1;
this.request.requestContext.quorumSelectedStoreResponse = null;
logger.info("Received partition key range splitting exception, will retry, {}", exception.toString());
this.request.forcePartitionKeyRangeRefresh = true;
return Pair.of(null, false);
}
private Pair<Mono<ShouldRetryResult>, Boolean> handleInvalidPartitionException(InvalidPartitionException exception) {
this.request.requestContext.quorumSelectedLSN = -1;
this.request.requestContext.resolvedPartitionKeyRange = null;
this.request.requestContext.quorumSelectedStoreResponse = null;
this.request.requestContext.globalCommittedSelectedLSN = -1;
if (this.attemptCountInvalidPartition++ > 2) {
logger.warn("Received second InvalidPartitionException after backoff/retry. Will fail the request. {}",
exception.toString());
return Pair.of(
Mono.just(ShouldRetryResult.error(BridgeInternal.createServiceUnavailableException(exception))),
false);
}
logger.info("Received invalid collection exception, will retry, {}", exception.toString());
this.request.forceNameCacheRefresh = true;
return Pair.of(null, false);
}
}
class RetryWithRetryPolicy extends RetryPolicyWithDiagnostics {
private final static int DEFAULT_WAIT_TIME_IN_SECONDS = 30;
private final static int MAXIMUM_BACKOFF_TIME_IN_MS = 15000;
private final static int INITIAL_BACKOFF_TIME_MS = 10;
private final static int BACK_OFF_MULTIPLIER = 2;
private volatile int attemptCount = 1;
private volatile int currentBackoffMilliseconds = RetryWithRetryPolicy.INITIAL_BACKOFF_TIME_MS;
private final int waitTimeInSeconds;
public RetryWithRetryPolicy(Integer waitTimeInSeconds) {
this.waitTimeInSeconds = waitTimeInSeconds != null ? waitTimeInSeconds : DEFAULT_WAIT_TIME_IN_SECONDS;
}
@Override
public Mono<ShouldRetryResult> shouldRetry(Exception exception) {
Duration backoffTime;
Duration timeout;
if (!(exception instanceof RetryWithException)) {
logger.debug("Operation will NOT be retried. Current attempt {}, Exception: ", this.attemptCount,
exception);
return Mono.just(ShouldRetryResult.noRetry());
}
RetryWithException lastRetryWithException = (RetryWithException)exception;
GoneAndRetryWithRetryPolicy.this.lastRetryWithException = lastRetryWithException;
long remainingMilliseconds =
(this.waitTimeInSeconds * 1_000L) -
GoneAndRetryWithRetryPolicy.this.getElapsedTime().toMillis();
int currentRetryAttemptCount = this.attemptCount++;
if (remainingMilliseconds <= 0) {
logger.warn("Received RetryWithException after backoff/retry. Will fail the request.",
lastRetryWithException);
return Mono.just(ShouldRetryResult.error(lastRetryWithException));
}
backoffTime = Duration.ofMillis(
Math.min(
Math.min(this.currentBackoffMilliseconds, remainingMilliseconds),
RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS));
this.currentBackoffMilliseconds *= RetryWithRetryPolicy.BACK_OFF_MULTIPLIER;
logger.debug("BackoffTime: {} ms.", backoffTime.toMillis());
long timeoutInMillSec = remainingMilliseconds - backoffTime.toMillis();
timeout = timeoutInMillSec > 0 ? Duration.ofMillis(timeoutInMillSec)
: Duration.ofMillis(RetryWithRetryPolicy.MAXIMUM_BACKOFF_TIME_IN_MS);
logger.info("Received RetryWithException, will retry, ", exception);
return Mono.just(ShouldRetryResult.retryAfter(backoffTime,
Quadruple.with(false, true, timeout, currentRetryAttemptCount)));
}
}
} |
I think this statement is the same as ReceiveAndDeleteMessageTest.class? | public static void main(String[] args) {
Class<?>[] testClasses;
try {
testClasses = new Class<?>[]{
Class.forName(ReceiveAndDeleteMessageTest.class.getName()),
Class.forName(ReceiveAndLockMessageTest.class.getName()),
Class.forName(SendMessageTest.class.getName()),
Class.forName(SendMessagesTest.class.getName())
};
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
PerfStressProgram.run(testClasses, args);
} | Class.forName(ReceiveAndDeleteMessageTest.class.getName()), | public static void main(String[] args) {
Class<?>[] testClasses;
testClasses = new Class<?>[]{
ReceiveAndDeleteMessageTest.class,
ReceiveAndLockMessageTest.class,
SendMessageTest.class,
SendMessagesTest.class
};
PerfStressProgram.run(testClasses, args);
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} |
@g2vinay can we update the guides and all uses of this just to use `.class`? ```java PerfStressProgram.run(new Class<?>[] { ReceiveAndDeleteMessageTest.class, ReceiveAndLockMessageTest.class, SendMessageTest.class, SendMessagesTest.class }, args); ``` There should be no need to get the `Class` by name as it is already within the scope of the project using it. | public static void main(String[] args) {
Class<?>[] testClasses;
try {
testClasses = new Class<?>[]{
Class.forName(ReceiveAndDeleteMessageTest.class.getName()),
Class.forName(ReceiveAndLockMessageTest.class.getName()),
Class.forName(SendMessageTest.class.getName()),
Class.forName(SendMessagesTest.class.getName())
};
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
PerfStressProgram.run(testClasses, args);
} | }; | public static void main(String[] args) {
Class<?>[] testClasses;
testClasses = new Class<?>[]{
ReceiveAndDeleteMessageTest.class,
ReceiveAndLockMessageTest.class,
SendMessageTest.class,
SendMessagesTest.class
};
PerfStressProgram.run(testClasses, args);
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} |
I think so. I happened to have the same comment 4 lines above. | public static void main(String[] args) {
Class<?>[] testClasses;
try {
testClasses = new Class<?>[]{
Class.forName(ReceiveAndDeleteMessageTest.class.getName()),
Class.forName(ReceiveAndLockMessageTest.class.getName()),
Class.forName(SendMessageTest.class.getName()),
Class.forName(SendMessagesTest.class.getName())
};
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
PerfStressProgram.run(testClasses, args);
} | }; | public static void main(String[] args) {
Class<?>[] testClasses;
testClasses = new Class<?>[]{
ReceiveAndDeleteMessageTest.class,
ReceiveAndLockMessageTest.class,
SendMessageTest.class,
SendMessagesTest.class
};
PerfStressProgram.run(testClasses, args);
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} | class App {
/**
* main function.
* @param args args
* @throws RuntimeException If not able to load test classes.
*/
} |
@srnagar @JonathanGiles Confirming if this ^^ is how we would be using the `FormField<T>` for strongly typed examples? Since we don't have any `T` value, still would need to extract the corresponding `asString`, or `asX` methods. | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String receiptUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/receipts/contoso-allinone.jpg";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeReceiptPoller =
client.beginRecognizeReceiptsFromUrl(receiptUrl);
List<RecognizedForm> receiptPageResults = recognizeReceiptPoller.getFinalResult();
for (int i = 0; i < receiptPageResults.size(); i++) {
final RecognizedForm recognizedForm = receiptPageResults.get(i);
System.out.printf("----------- Recognized receipt info for page %d -----------%n", i);
Receipt usReceipt = new Receipt(recognizedForm);
System.out.printf("Merchant Name: %s, confidence: %.2f%n", usReceipt.getMerchantName().getFieldValue(),
usReceipt.getMerchantName().getConfidence());
System.out.printf("Merchant Address: %s, confidence: %.2f%n",
usReceipt.getMerchantAddress().getFieldValue(),
usReceipt.getMerchantAddress().getConfidence());
System.out.printf("Merchant Phone Number %s, confidence: %.2f%n",
usReceipt.getMerchantPhoneNumber().getFieldValue(), usReceipt.getMerchantPhoneNumber().getConfidence());
System.out.printf("Total: %.2f confidence: %.2f%n", usReceipt.getTotal().getFieldValue(),
usReceipt.getTotal().getConfidence());
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
usReceipt.getTransactionDate().getFieldValue(), usReceipt.getTransactionDate().getConfidence());
System.out.printf("Transaction Time: %s, confidence: %.2f%n",
usReceipt.getTransactionTime().getFieldValue(), usReceipt.getTransactionTime().getConfidence());
System.out.printf("Receipt Items: %n");
usReceipt.getReceiptItems().forEach(receiptItem -> {
if (receiptItem.getName() != null) {
System.out.printf("Name: %s, confidence: %.2f%n", receiptItem.getName().getFieldValue(),
receiptItem.getName().getConfidence());
}
if (receiptItem.getQuantity() != null) {
System.out.printf("Quantity: %f, confidence: %.2f%n", receiptItem.getQuantity().getFieldValue(),
receiptItem.getQuantity().getConfidence());
}
if (receiptItem.getPrice() != null) {
System.out.printf("Price: %f, confidence: %.2f%n", receiptItem.getPrice().getFieldValue(),
receiptItem.getPrice().getConfidence());
}
if (receiptItem.getTotalPrice() != null) {
System.out.printf("Total Price: %f, confidence: %.2f%n",
receiptItem.getTotalPrice().getFieldValue(), receiptItem.getTotalPrice().getConfidence());
}
});
System.out.println("-----------------------------------");
}
} | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String receiptUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/receipts/contoso-allinone.jpg";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeReceiptPoller =
client.beginRecognizeReceiptsFromUrl(receiptUrl);
List<RecognizedForm> receiptPageResults = recognizeReceiptPoller.getFinalResult();
for (int i = 0; i < receiptPageResults.size(); i++) {
final RecognizedForm recognizedForm = receiptPageResults.get(i);
System.out.printf("----------- Recognized receipt info for page %d -----------%n", i);
Receipt usReceipt = new Receipt(recognizedForm);
System.out.printf("Merchant Name: %s, confidence: %.2f%n", usReceipt.getMerchantName().getValue(),
usReceipt.getMerchantName().getConfidence());
System.out.printf("Merchant Address: %s, confidence: %.2f%n",
usReceipt.getMerchantAddress().getValue(),
usReceipt.getMerchantAddress().getConfidence());
System.out.printf("Merchant Phone Number %s, confidence: %.2f%n",
usReceipt.getMerchantPhoneNumber().getValue(), usReceipt.getMerchantPhoneNumber().getConfidence());
System.out.printf("Total: %.2f confidence: %.2f%n", usReceipt.getTotal().getValue(),
usReceipt.getTotal().getConfidence());
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
usReceipt.getTransactionDate().getValue(), usReceipt.getTransactionDate().getConfidence());
System.out.printf("Transaction Time: %s, confidence: %.2f%n",
usReceipt.getTransactionTime().getValue(), usReceipt.getTransactionTime().getConfidence());
System.out.printf("Receipt Items: %n");
usReceipt.getReceiptItems().forEach(receiptItem -> {
if (receiptItem.getName() != null) {
System.out.printf("Name: %s, confidence: %.2f%n", receiptItem.getName().getValue(),
receiptItem.getName().getConfidence());
}
if (receiptItem.getQuantity() != null) {
System.out.printf("Quantity: %.2f, confidence: %.2f%n", receiptItem.getQuantity().getValue(),
receiptItem.getQuantity().getConfidence());
}
if (receiptItem.getPrice() != null) {
System.out.printf("Price: %.2f, confidence: %.2f%n", receiptItem.getPrice().getValue(),
receiptItem.getPrice().getConfidence());
}
if (receiptItem.getTotalPrice() != null) {
System.out.printf("Total Price: %.2f, confidence: %.2f%n",
receiptItem.getTotalPrice().getValue(), receiptItem.getTotalPrice().getConfidence());
}
});
System.out.println("-----------------------------------");
}
} | class StronglyTypedRecognizedForm {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | class StronglyTypedRecognizedForm {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | |
assertTrue(inputMap.equals(actualList)); https://www.baeldung.com/java-compare-hashmaps | public void toMapFromMap() {
Map<String, FormField<?>> inputMap = new HashMap<String, FormField<?>>() {
{
put("key", new FormField<>(null, null, null, null, 0));
}
};
Map<String, FormField<?>> actualList = new FormField<>(null, null, null,
new FieldValue(FieldValueType.MAP).setFormFieldMap(inputMap), 0).getValue().asMap();
assertEquals(inputMap, actualList);
} | assertEquals(inputMap, actualList); | public void toMapFromMap() {
Map<String, FormField> inputMap = new HashMap<String, FormField>() {
{
put("key", new FormField(null, null, null, null, 0));
}
};
Map<String, FormField> actualMap = new FormField(null, null, null,
new FieldValue(inputMap, FieldValueType.MAP), 0).getValue().asMap();
assertEquals(inputMap, actualMap);
} | class FieldValueExtensionMethodTest {
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromDate() {
LocalDate inputDate = LocalDate.of(2006, 6, 6);
FormField<?> formField = new FormField<>(null, null, null, new FieldValue(FieldValueType.DATE)
.setFormFieldDate(inputDate), 0);
LocalDate actualDate = formField.getValue().asDate();
assertEquals(inputDate, actualDate);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromString() {
String inputDateString = "2006/06/06";
FormField<?> formField = new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(inputDateString), 0);
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
formField.getValue().asDate());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as DATE from field value "
+ "of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromNull() {
FormField<?> formField = new FormField<>(null, null, null, new FieldValue(FieldValueType.DATE)
.setFormFieldDate(null), 0);
assertNull(formField.getValue().asDate());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromTime() {
LocalTime inputTime = LocalTime.parse("13:59:00", DateTimeFormatter.ofPattern("HH:mm:ss"));
FormField<?> formField = new FormField<>(null, null, null,
new FieldValue(FieldValueType.TIME).setFormFieldTime(inputTime), 0);
LocalTime actualTime = formField.getValue().asTime();
assertEquals(inputTime, actualTime);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromString() {
String inputTimeString = "13:59:00";
FormField<?> formField = new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(inputTimeString), 0);
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
formField.getValue().asTime());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as TIME from field"
+ " value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromNull() {
assertNull(new FormField<>(null, null, null,
new FieldValue(FieldValueType.TIME), 0).getValue().asTime());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromList() {
List<FormField<?>> inputList = new ArrayList<>(Arrays.asList(new FormField<>(null, null, null, null, 0)));
FormField<?> formField = new FormField<>(null, null, null,
new FieldValue(FieldValueType.LIST).setFormFieldList(inputList), 0);
List<FormField<?>> actualList = formField.getValue().asList();
assertEquals(inputList, actualList);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromString() {
String test = "testString";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(test), 0).getValue().asList());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as a LIST from field value "
+ "of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromNull() {
assertNull(new FormField<>(null, null, null, new FieldValue(FieldValueType.LIST), 0).getValue().asList());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromPhoneNumber() {
String phoneNumber = "19876543210";
String actualPhoneNumber = new FormField<>(null, null, null,
new FieldValue(FieldValueType.PHONE_NUMBER).setFormFieldPhoneNumber(phoneNumber), 0)
.getValue().asPhoneNumber();
assertEquals(phoneNumber, actualPhoneNumber);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromString() {
String phoneNumber = "19876543210";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(phoneNumber), 0)
.getValue().asPhoneNumber());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as aPHONE_NUMBER "
+ "from field value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromNull() {
assertNull(new FormField<>(null, null, null,
new FieldValue(FieldValueType.PHONE_NUMBER), 0).getValue().asPhoneNumber());
}
/**
* Test for {@link FieldValue
*/
@Test
/**
* Test for {@link FieldValue
*/
@Test
public void toMapFromString() {
String str = "1";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(str), 0).getValue().asMap());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as a MAP from field "
+ "value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toMapFromNull() {
assertNull(new FormField<>(null, null, null,
new FieldValue(FieldValueType.MAP), 0).getValue().asMap());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDoubleFromDouble() {
Double inputDouble = 2.2;
Double actualDoubleValue = new FormField<>(null, null, null,
new FieldValue(FieldValueType.DOUBLE).setFormFieldDouble(inputDouble), 0).getValue().asDouble();
assertEquals(inputDouble, actualDoubleValue);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDoubleFromString() {
String doubleString = "2.2";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(doubleString), 0).getValue().asDouble());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as DOUBLE from "
+ "field value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDoubleFromNull() {
assertNull(new FormField<>(null, null, null,
new FieldValue(FieldValueType.DOUBLE), 0).getValue().asDouble());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toLongFromLong() {
long inputLong = 22;
Long actualLongValue = new FormField<>(null, null, null,
new FieldValue(FieldValueType.LONG).setFormFieldLong(inputLong), 0).getValue().asLong();
assertEquals(inputLong, actualLongValue);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toLongFromString() {
String inputLongString = "22";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(inputLongString), 0).getValue().asLong());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as LONG from field value of "
+ "type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toLongFromNull() {
assertNull(new FormField<>(null, null, null,
new FieldValue(FieldValueType.LONG), 0).getValue().asLong());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toStringFromTime() {
LocalTime inputTime = LocalTime.parse("13:59:00", DateTimeFormatter.ofPattern("HH:mm:ss"));
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField<>(null, null, null,
new FieldValue(FieldValueType.TIME).setFormFieldTime(inputTime), 0).getValue().asString());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as STRING from field "
+ "value of type TIME");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toStringFromString() {
String stringValue = "String value";
String actualStringValue = new FormField<>(null, null, null,
new FieldValue(FieldValueType.STRING).setFormFieldString(stringValue), 0).getValue().asString();
assertEquals(stringValue, actualStringValue);
}
} | class FieldValueExtensionMethodTest {
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromDate() {
LocalDate inputDate = LocalDate.of(2006, 6, 6);
FormField formField = new FormField(null, null, null,
new FieldValue(inputDate, FieldValueType.DATE), 0);
LocalDate actualDate = formField.getValue().asDate();
assertEquals(inputDate, actualDate);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromString() {
String inputDateString = "2006/06/06";
FormField formField = new FormField(null, null, null,
new FieldValue(inputDateString, FieldValueType.STRING), 0);
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
formField.getValue().asDate());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as DATE from field value "
+ "of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toDateFromNull() {
FormField formField = new FormField(null, null, null,
new FieldValue(null, FieldValueType.DATE), 0);
assertNull(formField.getValue().asDate());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromTime() {
LocalTime inputTime = LocalTime.parse("13:59:00", DateTimeFormatter.ofPattern("HH:mm:ss"));
FormField formField = new FormField(null, null, null,
new FieldValue(inputTime, FieldValueType.TIME), 0);
LocalTime actualTime = formField.getValue().asTime();
assertEquals(inputTime, actualTime);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromString() {
String inputTimeString = "13:59:00";
FormField formField = new FormField(null, null, null,
new FieldValue(inputTimeString, FieldValueType.STRING), 0);
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
formField.getValue().asTime());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as TIME from field"
+ " value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toTimeFromNull() {
assertNull(new FormField(null, null, null,
new FieldValue(null, FieldValueType.TIME), 0).getValue().asTime());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromList() {
List<FormField> inputList = new ArrayList<>(Arrays.asList(new FormField(null, null, null, null, 0)));
FormField formField = new FormField(null, null, null,
new FieldValue(inputList, FieldValueType.LIST), 0);
List<FormField> actualList = formField.getValue().asList();
assertEquals(inputList, actualList);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromString() {
String test = "testString";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField(null, null, null,
new FieldValue(test, FieldValueType.STRING), 0).getValue().asList());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as a LIST from field value "
+ "of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toListFromNull() {
assertNull(new FormField(null, null, null, new FieldValue(null, FieldValueType.LIST), 0).getValue().asList());
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromPhoneNumber() {
String phoneNumber = "19876543210";
String actualPhoneNumber = new FormField(null, null, null,
new FieldValue(phoneNumber, FieldValueType.PHONE_NUMBER), 0)
.getValue().asPhoneNumber();
assertEquals(phoneNumber, actualPhoneNumber);
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromString() {
String phoneNumber = "19876543210";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField(null, null, null,
new FieldValue(phoneNumber, FieldValueType.STRING), 0)
.getValue().asPhoneNumber());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as aPHONE_NUMBER "
+ "from field value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toPhoneNumberFromNull() {
assertNull(new FormField(null, null, null,
new FieldValue(null, FieldValueType.PHONE_NUMBER), 0).getValue().asPhoneNumber());
}
/**
* Test for {@link FieldValue
*/
@Test
/**
* Test for {@link FieldValue
*/
@Test
public void toMapFromString() {
String str = "1";
final UnsupportedOperationException unsupportedOperationException =
assertThrows(UnsupportedOperationException.class, () ->
new FormField(null, null, null,
new FieldValue(str, FieldValueType.STRING), 0).getValue().asMap());
assertEquals(unsupportedOperationException.getMessage(), "Cannot get field as a MAP from field "
+ "value of type STRING");
}
/**
* Test for {@link FieldValue
*/
@Test
public void toMapFromNull() {
assertNull(new FormField(null, null, null,
new FieldValue(null, FieldValueType.MAP), 0).getValue().asMap());
}
/**
 * Test for {@link FieldValue#asDouble()}: a DOUBLE-typed field returns its
 * stored value unchanged.
 */
@Test
public void toDoubleFromDouble() {
    final Double expected = 2.2;
    final FieldValue doubleValue = new FieldValue(expected, FieldValueType.DOUBLE);
    assertEquals(expected, new FormField(null, null, null, doubleValue, 0).getValue().asDouble());
}
/**
 * Test for {@link FieldValue#asDouble()}: requesting a double from a
 * STRING-typed value must throw {@link UnsupportedOperationException}.
 */
@Test
public void toDoubleFromString() {
    String doubleString = "2.2";
    final UnsupportedOperationException unsupportedOperationException =
        assertThrows(UnsupportedOperationException.class, () ->
            new FormField(null, null, null,
                new FieldValue(doubleString, FieldValueType.STRING), 0).getValue().asDouble());
    // JUnit convention: expected value first, actual second (arguments were swapped).
    assertEquals("Cannot get field as DOUBLE from "
        + "field value of type STRING", unsupportedOperationException.getMessage());
}
/**
 * Test for {@link FieldValue#asDouble()}: a DOUBLE-typed field holding a null
 * value yields null rather than throwing.
 */
@Test
public void toDoubleFromNull() {
assertNull(new FormField(null, null, null,
new FieldValue(null, FieldValueType.DOUBLE), 0).getValue().asDouble());
}
/**
 * Test for {@link FieldValue#asLong()}: a LONG-typed field returns its
 * stored value unchanged.
 */
@Test
public void toLongFromLong() {
long inputLong = 22;
Long actualLongValue = new FormField(null, null, null,
new FieldValue(inputLong, FieldValueType.LONG), 0).getValue().asLong();
// NOTE(review): comparing primitive long against boxed Long - if asLong()
// ever returned null this would NPE during unboxing instead of reporting an
// assertion failure; confirm that is acceptable.
assertEquals(inputLong, actualLongValue);
}
/**
 * Test for {@link FieldValue#asLong()}: requesting a long from a STRING-typed
 * value must throw {@link UnsupportedOperationException}.
 */
@Test
public void toLongFromString() {
    String inputLongString = "22";
    final UnsupportedOperationException unsupportedOperationException =
        assertThrows(UnsupportedOperationException.class, () ->
            new FormField(null, null, null,
                new FieldValue(inputLongString, FieldValueType.STRING), 0).getValue().asLong());
    // JUnit convention: expected value first, actual second (arguments were swapped).
    assertEquals("Cannot get field as LONG from field value of "
        + "type STRING", unsupportedOperationException.getMessage());
}
/**
 * Test for {@link FieldValue#asLong()}: a LONG-typed field holding a null
 * value yields null rather than throwing.
 */
@Test
public void toLongFromNull() {
assertNull(new FormField(null, null, null,
new FieldValue(null, FieldValueType.LONG), 0).getValue().asLong());
}
/**
 * Test for {@link FieldValue#asString()}: requesting a string from a
 * TIME-typed value must throw {@link UnsupportedOperationException}.
 */
@Test
public void toStringFromTime() {
    LocalTime inputTime = LocalTime.parse("13:59:00", DateTimeFormatter.ofPattern("HH:mm:ss"));
    final UnsupportedOperationException unsupportedOperationException =
        assertThrows(UnsupportedOperationException.class, () ->
            new FormField(null, null, null,
                new FieldValue(inputTime, FieldValueType.TIME), 0).getValue().asString());
    // JUnit convention: expected value first, actual second (arguments were swapped).
    assertEquals("Cannot get field as STRING from field "
        + "value of type TIME", unsupportedOperationException.getMessage());
}
/**
 * Test for {@link FieldValue#asString()}: a STRING-typed field returns its
 * stored text unchanged.
 */
@Test
public void toStringFromString() {
    final String expected = "String value";
    final FieldValue fieldValue = new FieldValue(expected, FieldValueType.STRING);
    assertEquals(expected, new FormField(null, null, null, fieldValue, 0).getValue().asString());
}
} |
I don't think the generic type `<T>` is useful if the user still has to do `getFieldValue().asString()`. I think we are trying to make `FormField` work both ways and is causing this issue. Have you considered the approach below? It's not ideal but a new type that's defined once can reduce the burden on the user. They'll not have to repeatedly call `getValue().asType()` method which is going to be accessed a lot more frequently. ```java // Receipt type public final class Receipt { private ReceiptType receiptType; private TypedFormField<String> merchantAddress; // return a different form field type public Receipt(RecognizedForm recognizedForm) { for (Map.Entry<String, FormField> entry : recognizedForm.getFields().entrySet()) { String key = entry.getKey(); FormField fieldValue = entry.getValue(); switch (key) { case "MerchantAddress": merchantAddress = new TypedFormField(fieldValue, String.class); break; default: break; } } } // getter and setters here public TypedFormField<String> getMerchantAddress() { return merchantAddress; } } ------------------------------------------- // Strongly typed form field public class TypedFormField<T> { // with a better class name private final FormField formField; private final Class<T> type; public TypedFormField(FormField formField, Class<T> type) { this.formField = formField; this.type = type; } public T getValue() { if (formField.getValueType() == FieldValueType.STRING && type.getClass().equals(String.class)) { return formField.getValue().asDate(); } throw new IllegalStateException("Type mismatch error"); } public float getConfidence() { return this.formField.getConfidence(); } } ------------------------------------------- // User code Receipt usReceipt = new Receipt(recognizedForm); String address = usReceipt.getMerchantAddress().getValue() float confidence = usReceipt.getMerchantAddress().getConfidence(); ``` | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String receiptUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/receipts/contoso-allinone.jpg";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeReceiptPoller =
client.beginRecognizeReceiptsFromUrl(receiptUrl);
List<RecognizedForm> receiptPageResults = recognizeReceiptPoller.getFinalResult();
for (int i = 0; i < receiptPageResults.size(); i++) {
final RecognizedForm recognizedForm = receiptPageResults.get(i);
System.out.printf("----------- Recognized receipt info for page %d -----------%n", i);
Receipt usReceipt = new Receipt(recognizedForm);
System.out.printf("Merchant Name: %s, confidence: %.2f%n", usReceipt.getMerchantName().getFieldValue(),
usReceipt.getMerchantName().getConfidence());
System.out.printf("Merchant Address: %s, confidence: %.2f%n",
usReceipt.getMerchantAddress().getFieldValue(),
usReceipt.getMerchantAddress().getConfidence());
System.out.printf("Merchant Phone Number %s, confidence: %.2f%n",
usReceipt.getMerchantPhoneNumber().getFieldValue(), usReceipt.getMerchantPhoneNumber().getConfidence());
System.out.printf("Total: %.2f confidence: %.2f%n", usReceipt.getTotal().getFieldValue(),
usReceipt.getTotal().getConfidence());
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
usReceipt.getTransactionDate().getFieldValue(), usReceipt.getTransactionDate().getConfidence());
System.out.printf("Transaction Time: %s, confidence: %.2f%n",
usReceipt.getTransactionTime().getFieldValue(), usReceipt.getTransactionTime().getConfidence());
System.out.printf("Receipt Items: %n");
usReceipt.getReceiptItems().forEach(receiptItem -> {
if (receiptItem.getName() != null) {
System.out.printf("Name: %s, confidence: %.2f%n", receiptItem.getName().getFieldValue(),
receiptItem.getName().getConfidence());
}
if (receiptItem.getQuantity() != null) {
System.out.printf("Quantity: %f, confidence: %.2f%n", receiptItem.getQuantity().getFieldValue(),
receiptItem.getQuantity().getConfidence());
}
if (receiptItem.getPrice() != null) {
System.out.printf("Price: %f, confidence: %.2f%n", receiptItem.getPrice().getFieldValue(),
receiptItem.getPrice().getConfidence());
}
if (receiptItem.getTotalPrice() != null) {
System.out.printf("Total Price: %f, confidence: %.2f%n",
receiptItem.getTotalPrice().getFieldValue(), receiptItem.getTotalPrice().getConfidence());
}
});
System.out.println("-----------------------------------");
}
} | public static void main(final String[] args) {
FormRecognizerClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildClient();
String receiptUrl = "https:
+ "/azure-ai-formrecognizer/src/samples/java/sample-forms/receipts/contoso-allinone.jpg";
SyncPoller<OperationResult, List<RecognizedForm>> recognizeReceiptPoller =
client.beginRecognizeReceiptsFromUrl(receiptUrl);
List<RecognizedForm> receiptPageResults = recognizeReceiptPoller.getFinalResult();
for (int i = 0; i < receiptPageResults.size(); i++) {
final RecognizedForm recognizedForm = receiptPageResults.get(i);
System.out.printf("----------- Recognized receipt info for page %d -----------%n", i);
Receipt usReceipt = new Receipt(recognizedForm);
System.out.printf("Merchant Name: %s, confidence: %.2f%n", usReceipt.getMerchantName().getValue(),
usReceipt.getMerchantName().getConfidence());
System.out.printf("Merchant Address: %s, confidence: %.2f%n",
usReceipt.getMerchantAddress().getValue(),
usReceipt.getMerchantAddress().getConfidence());
System.out.printf("Merchant Phone Number %s, confidence: %.2f%n",
usReceipt.getMerchantPhoneNumber().getValue(), usReceipt.getMerchantPhoneNumber().getConfidence());
System.out.printf("Total: %.2f confidence: %.2f%n", usReceipt.getTotal().getValue(),
usReceipt.getTotal().getConfidence());
System.out.printf("Transaction Date: %s, confidence: %.2f%n",
usReceipt.getTransactionDate().getValue(), usReceipt.getTransactionDate().getConfidence());
System.out.printf("Transaction Time: %s, confidence: %.2f%n",
usReceipt.getTransactionTime().getValue(), usReceipt.getTransactionTime().getConfidence());
System.out.printf("Receipt Items: %n");
usReceipt.getReceiptItems().forEach(receiptItem -> {
if (receiptItem.getName() != null) {
System.out.printf("Name: %s, confidence: %.2f%n", receiptItem.getName().getValue(),
receiptItem.getName().getConfidence());
}
if (receiptItem.getQuantity() != null) {
System.out.printf("Quantity: %.2f, confidence: %.2f%n", receiptItem.getQuantity().getValue(),
receiptItem.getQuantity().getConfidence());
}
if (receiptItem.getPrice() != null) {
System.out.printf("Price: %.2f, confidence: %.2f%n", receiptItem.getPrice().getValue(),
receiptItem.getPrice().getConfidence());
}
if (receiptItem.getTotalPrice() != null) {
System.out.printf("Total Price: %.2f, confidence: %.2f%n",
receiptItem.getTotalPrice().getValue(), receiptItem.getTotalPrice().getConfidence());
}
});
System.out.println("-----------------------------------");
}
} | class StronglyTypedRecognizedForm {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | class StronglyTypedRecognizedForm {
/**
* Main method to invoke this demo.
*
* @param args Unused. Arguments to the program.
*/
} | |
expectNextCount = 2 | public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
} | StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify(); | public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
// Fixture identities: two documents with distinct ids and partition names.
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
// Entity metadata used to resolve the container name and partition-key values.
// NOTE(review): the id type parameter is Integer while the entity id is Long - confirm intended.
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
// Captured statically so the static @AfterClass hook can drop the container.
private static CosmosTemplate staticTemplate;
// Guards one-time container creation across test methods.
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
// Create the container only once for the whole test class.
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
// Seed both fixture documents before every test; blocking via StepVerifier
// is acceptable in test setup.
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
// Remove every document after each test so tests stay independent.
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
// Drop the container created in setUp; uses the statically captured template
// because @AfterClass methods are static.
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
/**
 * End-to-end lifecycle: clear the container, verify the entity is absent,
 * save it, find it, delete it, and verify it is absent again.
 */
@Test
public void testLongIdDomainPartition() {
    Mono<Void> deletedMono = this.repository.deleteAll();
    StepVerifier.create(deletedMono).thenAwait().verifyComplete();
    Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
    // Assert zero emissions explicitly rather than bare completion.
    StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
    Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
    // Assert the exact saved entity is emitted instead of consuming blindly.
    StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
    Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
    StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
    Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
    StepVerifier.create(deleteMono).verifyComplete();
    Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
    StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
// An entity class with no recognizable id mapping must be rejected by
// CosmosEntityInformation at construction time.
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
/**
 * saveAll followed by findAll should round-trip exactly two documents.
 */
@Test
public void testSaveAllAndFindAll() {
    // Start from an empty container so the expected counts are deterministic.
    final Mono<Void> deletedMono = repository.deleteAll();
    StepVerifier.create(deletedMono).thenAwait().verifyComplete();
    Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
    // expectNextCount(2) is stronger than thenConsumeWhile(true), which would
    // also pass if fewer (or zero) documents were emitted.
    StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
    final Flux<LongIdDomainPartition> allFlux = repository.findAll();
    StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
/**
 * setUp seeds exactly two documents, so count() must emit 2.
 */
@Test
public void testCount() {
    final Mono<Long> documentCount = repository.count();
    StepVerifier.create(documentCount).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
// Deleting by id alone on a partitioned container must surface as a
// CosmosAccessException.
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
/**
 * Deleting by id plus partition key should succeed and leave the entity
 * unfindable.
 */
@Test
public void testDeleteByIdAndPartitionKey() {
    final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
    StepVerifier.create(deleteMono).verifyComplete();
    Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
    // Assert zero emissions explicitly rather than bare completion.
    StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
// Empty the container first, then expect the delete-by-id to error out.
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
/**
 * Deleting one of the two seeded documents should leave exactly one behind.
 */
@Test
public void testDelete() {
    Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
    // Assert the exact saved entity is emitted instead of consuming blindly.
    StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
    Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
    StepVerifier.create(deleteMono).verifyComplete();
    // setUp seeded two documents; deleting one leaves one.
    Mono<Long> countMono = repository.count();
    StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
// Empty the container first, then expect the entity delete to error out.
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
// NOTE: removed the duplicate @Test annotation that preceded this method;
// @Test is not repeatable, so the duplicate is a compile error.
/**
 * existsById should report true once the entity has been saved.
 */
@Test
public void testExistsById() {
    Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
    // Assert the exact saved entity is emitted instead of consuming blindly.
    StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
    Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
    StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
// A third entity whose id sits between DOMAIN_1 and DOMAIN_2 numerically.
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
// Ascending sort on "number": DOMAIN_1, other, DOMAIN_2.
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
// Descending sort reverses that order.
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
// Deliberately invalid entity: it has no id field/annotation, so
// CosmosEntityInformation construction is expected to reject it
// (see testInvalidDomain).
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
// NOTE: removed the duplicate @Test annotation that preceded this method;
// @Test is not repeatable, so the duplicate is a compile error.
/**
 * existsById should report true once the entity has been saved.
 */
@Test
public void testExistsById() {
    Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
    StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
    Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
    StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
Setup can be done as a blocking call - no need to worry about it, but this looks good, thanks :) | public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
} | StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify(); | public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
Here, we should test that the response is the saved `DOMAIN_1` something like this -> ```suggestion StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify(); ``` | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
} | StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify(); | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
Here, we can make sure that nothing is returned from the backend - since we deleted all entities. ```suggestion StepVerifier.create(idMono).expectNextCount(0).verifyComplete(); ``` | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
} | StepVerifier.create(idMono).verifyComplete(); | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
Same here, we can make sure that entities are not returned back. ```suggestion StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete(); ``` | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
} | StepVerifier.create(afterDelIdMono).verifyComplete(); | public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
We can verify the count here as well, like you have done below. | public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
} | StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify(); | public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
Same here, make sure the expectNextCount = 0 | public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
} | StepVerifier.create(findIdMono).verifyComplete(); | public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNextCount(0).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
}
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} |
`expectNext = DOMAIN_1` | public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
} | StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify(); | public void testDelete() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(1L).verifyComplete();
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(2L).verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber());
StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
final Mono<Void> deleteMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
final Mono<Void> deleteIdMono = repository.deleteById(DOMAIN_1.getNumber(),
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
@Test
public void testDeleteShouldFailIfNothingToDelete() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Void> deleteIdMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteIdMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<Long> countMono = repository.count();
StepVerifier.create(countMono).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).thenConsumeWhile(domain -> true).expectComplete().verify();
Mono<Boolean> booleanMono = this.repository.existsById(DOMAIN_1.getNumber());
StepVerifier.create(booleanMono).expectNext(true).expectComplete().verify();
}
@Test
public void testFindAllSort() {
final LongIdDomainPartition other = new LongIdDomainPartition(
DOMAIN_1.getNumber() + 1, "other-name");
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, other));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
final Sort ascSort = Sort.by(Sort.Direction.ASC, "number");
Flux<LongIdDomainPartition> ascAllFlux = this.repository.findAll(ascSort);
StepVerifier.create(ascAllFlux).expectNext(DOMAIN_1, other, DOMAIN_2).verifyComplete();
final Sort descSort = Sort.by(Sort.Direction.DESC, "number");
Flux<LongIdDomainPartition> descAllFlux = this.repository.findAll(descSort);
StepVerifier.create(descAllFlux).expectNext(DOMAIN_2, other, DOMAIN_1).verifyComplete();
}
private static class InvalidDomain {
private long count;
private String location;
InvalidDomain() {
}
InvalidDomain(long count, String location) {
this.count = count;
this.location = location;
}
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public String getLocation() {
return location;
}
public void setLocation(String location) {
this.location = location;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InvalidDomain that = (InvalidDomain) o;
return count == that.count
&& Objects.equals(location, that.location);
}
@Override
public int hashCode() {
return Objects.hash(count, location);
}
@Override
public String toString() {
return "InvalidDomain{"
+ "count="
+ count
+ ", location='"
+ location
+ '\''
+ '}';
}
}
} | class ReactiveLongIdDomainPartitionPartitionRepositoryIT {
private static final Long ID_1 = 12345L;
private static final String NAME_1 = "moary";
private static final Long ID_2 = 67890L;
private static final String NAME_2 = "camille";
private static final LongIdDomainPartition DOMAIN_1 = new LongIdDomainPartition(ID_1, NAME_1);
private static final LongIdDomainPartition DOMAIN_2 = new LongIdDomainPartition(ID_2, NAME_2);
private static final CosmosEntityInformation<LongIdDomainPartition, Integer> entityInformation =
new CosmosEntityInformation<>(LongIdDomainPartition.class);
private static CosmosTemplate staticTemplate;
private static boolean isSetupDone;
@Autowired
private CosmosTemplate template;
@Autowired
private ReactiveLongIdDomainPartitionRepository repository;
@Before
public void setUp() {
if (!isSetupDone) {
staticTemplate = template;
template.createContainerIfNotExists(entityInformation);
}
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).thenConsumeWhile(domain -> true).expectComplete().verify();
isSetupDone = true;
}
@After
public void cleanup() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
}
@AfterClass
public static void afterClassCleanup() {
staticTemplate.deleteContainer(entityInformation.getContainerName());
}
@Test
public void testLongIdDomainPartition() {
Mono<Void> deletedMono = this.repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Mono<LongIdDomainPartition> idMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(idMono).expectNextCount(0).verifyComplete();
Mono<LongIdDomainPartition> saveMono = this.repository.save(DOMAIN_1);
StepVerifier.create(saveMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<LongIdDomainPartition> findIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(findIdMono).expectNext(DOMAIN_1).expectComplete().verify();
Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
StepVerifier.create(deleteMono).verifyComplete();
Mono<LongIdDomainPartition> afterDelIdMono = this.repository.findById(ID_1,
new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1)));
StepVerifier.create(afterDelIdMono).expectNextCount(0).verifyComplete();
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidDomain() {
new CosmosEntityInformation<InvalidDomain, Long>(InvalidDomain.class);
}
@Test
public void testSaveAllAndFindAll() {
final Mono<Void> deletedMono = repository.deleteAll();
StepVerifier.create(deletedMono).thenAwait().verifyComplete();
Flux<LongIdDomainPartition> savedAllFlux = this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2));
StepVerifier.create(savedAllFlux).expectNextCount(2).verifyComplete();
final Flux<LongIdDomainPartition> allFlux = repository.findAll();
StepVerifier.create(allFlux).expectNextCount(2).verifyComplete();
}
@Test
public void testCount() {
    // NOTE(review): this assertion relies on the two entities persisted by an
    // earlier test in this class still being present — confirm tests run in order.
    StepVerifier.create(repository.count())
        .expectNext(2L)
        .verifyComplete();
}
@Test
public void testDeleteByIdWithoutPartitionKey() {
    // Deleting a partitioned entity by id alone must fail with CosmosAccessException.
    StepVerifier.create(repository.deleteById(DOMAIN_1.getNumber()))
        .expectError(CosmosAccessException.class)
        .verify();
}
@Test
public void testDeleteByIdAndPartitionKey() {
    final PartitionKey partitionKey =
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1));

    // Deleting with both id and partition key succeeds...
    StepVerifier.create(repository.deleteById(DOMAIN_1.getNumber(), partitionKey))
        .verifyComplete();

    // ...and the entity is gone afterwards.
    StepVerifier.create(this.repository.findById(ID_1, partitionKey))
        .expectNextCount(0)
        .verifyComplete();
}
@Test
public void testDeleteByIdShouldFailIfNothingToDelete() {
    // Empty the container first so there is nothing left to delete.
    StepVerifier.create(repository.deleteAll()).thenAwait().verifyComplete();

    // Deleting a missing document must surface a CosmosAccessException.
    final PartitionKey partitionKey =
        new PartitionKey(entityInformation.getPartitionKeyFieldValue(DOMAIN_1));
    StepVerifier.create(repository.deleteById(DOMAIN_1.getNumber(), partitionKey))
        .expectError(CosmosAccessException.class)
        .verify();
}
// Fixed: the @Test annotation was duplicated, which does not compile because
// JUnit's @Test is not a repeatable annotation.
@Test
public void testDeleteShouldFailIfNothingToDelete() {
    // Empty the container so the subsequent delete has no matching document.
    final Mono<Void> deletedMono = repository.deleteAll();
    StepVerifier.create(deletedMono).thenAwait().verifyComplete();

    // Deleting a non-existent entity must surface a CosmosAccessException.
    final Mono<Void> deleteMono = this.repository.delete(DOMAIN_1);
    StepVerifier.create(deleteMono).expectError(CosmosAccessException.class).verify();
}
@Test
public void testDeleteAll() {
    // Seed two entities...
    StepVerifier.create(this.repository.saveAll(Arrays.asList(DOMAIN_1, DOMAIN_2)))
        .expectNextCount(2)
        .verifyComplete();

    // ...then wipe the container and confirm it is empty.
    StepVerifier.create(repository.deleteAll()).thenAwait().verifyComplete();
    StepVerifier.create(repository.count()).expectNext(0L).verifyComplete();
}
@Test
public void testExistsById() {
    StepVerifier.create(this.repository.save(DOMAIN_1))
        .expectNext(DOMAIN_1)
        .verifyComplete();

    // existsById must report the freshly saved entity.
    StepVerifier.create(this.repository.existsById(DOMAIN_1.getNumber()))
        .expectNext(true)
        .verifyComplete();
}
@Test
public void testFindAllSort() {
    // An entity whose key sits between DOMAIN_1 and DOMAIN_2.
    final LongIdDomainPartition middle =
        new LongIdDomainPartition(DOMAIN_1.getNumber() + 1, "other-name");
    StepVerifier.create(this.repository.saveAll(Arrays.asList(DOMAIN_1, middle)))
        .thenConsumeWhile(domain -> true)
        .verifyComplete();

    // NOTE(review): the expected ordering includes DOMAIN_2, which must already
    // be present from an earlier test — confirm the in-class execution order.
    StepVerifier.create(this.repository.findAll(Sort.by(Sort.Direction.ASC, "number")))
        .expectNext(DOMAIN_1, middle, DOMAIN_2)
        .verifyComplete();

    StepVerifier.create(this.repository.findAll(Sort.by(Sort.Direction.DESC, "number")))
        .expectNext(DOMAIN_2, middle, DOMAIN_1)
        .verifyComplete();
}
/**
 * Deliberately malformed domain type — it declares no id property — used to
 * verify that entity-metadata construction rejects it.
 */
private static class InvalidDomain {
    private long count;
    private String location;

    InvalidDomain() {
    }

    InvalidDomain(long count, String location) {
        this.count = count;
        this.location = location;
    }

    public long getCount() {
        return count;
    }

    public void setCount(long count) {
        this.count = count;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    @Override
    public boolean equals(Object o) {
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        if (o == this) {
            return true;
        }
        final InvalidDomain other = (InvalidDomain) o;
        return this.count == other.count
            && Objects.equals(this.location, other.location);
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, location);
    }

    @Override
    public String toString() {
        // Same rendering as the previous string concatenation.
        return String.format("InvalidDomain{count=%d, location='%s'}", count, location);
    }
}
} |
remember to run `credcheck` for storage key | private Flux<VolumeParameters> createFileShareAsync(final StorageAccount storageAccount) {
return storageAccount
.getKeysAsync()
.map(storageAccountKeys -> storageAccountKeys.get(0).value())
.flatMapMany(
key -> {
ShareServiceAsyncClient shareServiceAsyncClient =
new ShareServiceClientBuilder()
.connectionString(
Utils.getStorageConnectionString(storageAccount.name(), key, manager().environment()))
.httpClient(manager().httpPipeline().getHttpClient())
.buildAsyncClient();
Objects.requireNonNull(newFileShares);
return Flux
.fromIterable(newFileShares.entrySet())
.flatMap(
fileShareEntry ->
createSingleFileShareAsync(
shareServiceAsyncClient, fileShareEntry.getKey(), fileShareEntry.getValue(), key));
});
} | .map(storageAccountKeys -> storageAccountKeys.get(0).value()) | private Flux<VolumeParameters> createFileShareAsync(final StorageAccount storageAccount) {
return storageAccount
.getKeysAsync()
.map(storageAccountKeys -> storageAccountKeys.get(0).value())
.flatMapMany(
key -> {
ShareServiceAsyncClient shareServiceAsyncClient =
new ShareServiceClientBuilder()
.connectionString(
Utils.getStorageConnectionString(storageAccount.name(), key, manager().environment()))
.httpClient(manager().httpPipeline().getHttpClient())
.buildAsyncClient();
Objects.requireNonNull(newFileShares);
return Flux
.fromIterable(newFileShares.entrySet())
.flatMap(
fileShareEntry ->
createSingleFileShareAsync(
shareServiceAsyncClient, fileShareEntry.getKey(), fileShareEntry.getValue(), key));
});
} | class VolumeParameters {
private String volumeName;
private String fileShareName;
private String storageAccountKey;
VolumeParameters(String volumeName, String fileShareName, String storageAccountKey) {
this.volumeName = volumeName;
this.fileShareName = fileShareName;
this.storageAccountKey = storageAccountKey;
}
} | class VolumeParameters {
private String volumeName;
private String fileShareName;
private String storageAccountKey;
VolumeParameters(String volumeName, String fileShareName, String storageAccountKey) {
this.volumeName = volumeName;
this.fileShareName = fileShareName;
this.storageAccountKey = storageAccountKey;
}
} |
None of the playback tests use the `Share File Volume`, so no storage keys are recorded. | private Flux<VolumeParameters> createFileShareAsync(final StorageAccount storageAccount) {
return storageAccount
.getKeysAsync()
.map(storageAccountKeys -> storageAccountKeys.get(0).value())
.flatMapMany(
key -> {
ShareServiceAsyncClient shareServiceAsyncClient =
new ShareServiceClientBuilder()
.connectionString(
Utils.getStorageConnectionString(storageAccount.name(), key, manager().environment()))
.httpClient(manager().httpPipeline().getHttpClient())
.buildAsyncClient();
Objects.requireNonNull(newFileShares);
return Flux
.fromIterable(newFileShares.entrySet())
.flatMap(
fileShareEntry ->
createSingleFileShareAsync(
shareServiceAsyncClient, fileShareEntry.getKey(), fileShareEntry.getValue(), key));
});
} | .map(storageAccountKeys -> storageAccountKeys.get(0).value()) | private Flux<VolumeParameters> createFileShareAsync(final StorageAccount storageAccount) {
return storageAccount
.getKeysAsync()
.map(storageAccountKeys -> storageAccountKeys.get(0).value())
.flatMapMany(
key -> {
ShareServiceAsyncClient shareServiceAsyncClient =
new ShareServiceClientBuilder()
.connectionString(
Utils.getStorageConnectionString(storageAccount.name(), key, manager().environment()))
.httpClient(manager().httpPipeline().getHttpClient())
.buildAsyncClient();
Objects.requireNonNull(newFileShares);
return Flux
.fromIterable(newFileShares.entrySet())
.flatMap(
fileShareEntry ->
createSingleFileShareAsync(
shareServiceAsyncClient, fileShareEntry.getKey(), fileShareEntry.getValue(), key));
});
} | class VolumeParameters {
private String volumeName;
private String fileShareName;
private String storageAccountKey;
VolumeParameters(String volumeName, String fileShareName, String storageAccountKey) {
this.volumeName = volumeName;
this.fileShareName = fileShareName;
this.storageAccountKey = storageAccountKey;
}
} | class VolumeParameters {
private String volumeName;
private String fileShareName;
private String storageAccountKey;
VolumeParameters(String volumeName, String fileShareName, String storageAccountKey) {
this.volumeName = volumeName;
this.fileShareName = fileShareName;
this.storageAccountKey = storageAccountKey;
}
} |
This is a test, so it doesn't matter much; but in general you should not create an ObjectMapper per method invocation — it is costly time-wise. | private void validateJson(String jsonInString) {
try {
ObjectMapper mapper = new ObjectMapper();
mapper.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
} | ObjectMapper mapper = new ObjectMapper(); | private void validateJson(String jsonInString) {
try {
OBJECT_MAPPER.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
private CosmosClientBuilder cosmosClientBuilder;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"emulator"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"emulator"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"emulator"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"emulator"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"emulator"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"emulator"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"emulator"})
public void supplementalResponseStatisticsList() throws Exception {
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
}
@Test(groups = {"emulator"})
public void serializationOnVariousScenarios() {
CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
String diagnostics = cosmosDatabase.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
CosmosContainerResponse containerResponse = this.container.read();
diagnostics = containerResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
TestItem testItem = new TestItem();
testItem.id = "TestId";
testItem.mypk = "TestPk";
CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
testItem.id = "TestId2";
testItem.mypk = "TestPk";
itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
TestItem readTestItem = itemResponse.getItem();
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
InternalObjectNode properties = readItemResponse.getItem();
diagnostics = readItemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
private InternalObjectNode getInternalObjectNode() {
InternalObjectNode internalObjectNode = new InternalObjectNode();
internalObjectNode.setId(UUID.randomUUID().toString());
BridgeInternal.setProperty(internalObjectNode, "mypk", "test");
return internalObjectNode;
}
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
@SuppressWarnings({"unchecked"})
List<ClientSideRequestStatistics.StoreResponseStatistics> list
= (List<ClientSideRequestStatistics.StoreResponseStatistics>) storeResponseStatisticsField.get(requestStatistics);
return list;
}
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
storeResponseStatisticsField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
private void validateTransportRequestTimelineGateway(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
private void validateTransportRequestTimelineDirect(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"created\"");
assertThat(diagnostics).contains("\"eventName\":\"queued\"");
assertThat(diagnostics).contains("\"eventName\":\"pipelined\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
assertThat(diagnostics).contains("\"eventName\":\"completed\"");
}
public static class TestItem {
public String id;
public String mypk;
public TestItem() {
}
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void supplementalResponseStatisticsList() throws Exception {
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
}
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
    // Reading a database records a database-deserialization event.
    CosmosDatabaseResponse databaseResponse = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
    String diagnosticsString = databaseResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
    // Reading a container records a container-deserialization event.
    CosmosContainerResponse containerReadResponse = this.container.read();
    diagnosticsString = containerReadResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
    // Creating without an explicit partition key triggers a partition-key fetch
    // serialization event.
    TestItem item = new TestItem();
    item.id = "TestId";
    item.mypk = "TestPk";
    CosmosItemResponse<TestItem> createItemResponse = this.container.createItem(item);
    diagnosticsString = createItemResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    // Creating with an explicit partition key avoids the fetch event, and no
    // item deserialization has happened yet at this point.
    item.id = "TestId2";
    item.mypk = "TestPk";
    createItemResponse = this.container.createItem(item, new PartitionKey("TestPk"), null);
    diagnosticsString = createItemResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    assertThat(diagnosticsString).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // Only after getItem() does the diagnostics string report item deserialization.
    TestItem deserializedItem = createItemResponse.getItem();
    diagnosticsString = createItemResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // A point read followed by getItem() reports item deserialization as well,
    // and the diagnostics carry the SDK user agent.
    CosmosItemResponse<InternalObjectNode> pointReadResponse = this.container.readItem(item.id, new PartitionKey(item.mypk), null, InternalObjectNode.class);
    InternalObjectNode itemProperties = pointReadResponse.getItem();
    diagnosticsString = pointReadResponse.getDiagnostics().toString();
    assertThat(diagnosticsString).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    assertThat(diagnosticsString).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
// Builds a minimal document payload: a random id plus the "mypk" property
// whose value is used as the partition key in these tests.
private InternalObjectNode getInternalObjectNode() {
    InternalObjectNode node = new InternalObjectNode();
    node.setId(UUID.randomUUID().toString());
    BridgeInternal.setProperty(node, "mypk", "test");
    return node;
}
// Reads the private supplementalResponseStatisticsList field via reflection;
// ClientSideRequestStatistics exposes no public accessor for it.
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field listField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    listField.setAccessible(true);
    @SuppressWarnings({"unchecked"})
    List<ClientSideRequestStatistics.StoreResponseStatistics> statistics =
        (List<ClientSideRequestStatistics.StoreResponseStatistics>) listField.get(requestStatistics);
    return statistics;
}
// Resets the recorded statistics by swapping a fresh empty list into the
// private field via reflection.
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field listField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    listField.setAccessible(true);
    listField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
// Asserts that a gateway-mode request timeline reports every expected event.
// The original asserted "connectionConfigured" twice (copy-paste duplicate);
// the redundant assertion was removed.
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
// Asserts that a direct-mode transport timeline reports every lifecycle stage.
private void validateTransportRequestTimelineDirect(String diagnostics) {
    String[] expectedEvents = {"created", "queued", "pipelined", "transitTime", "received", "completed"};
    for (String eventName : expectedEvents) {
        assertThat(diagnostics).contains("\"eventName\":\"" + eventName + "\"");
    }
}
// Minimal POJO payload used by the serialization tests; "mypk" holds the
// value passed as the item's partition key.
public static class TestItem {
// Document id.
public String id;
// Partition-key value.
public String mypk;
// No-args constructor kept for serializer use (presumably required by
// Jackson deserialization -- confirm).
public TestItem() {
}
}
} |
Yes, good catch. Although it's a test, the mapper should be at the class level; I will change it in the next iteration. | private void validateJson(String jsonInString) {
try {
ObjectMapper mapper = new ObjectMapper();
mapper.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
} | ObjectMapper mapper = new ObjectMapper(); | private void validateJson(String jsonInString) {
try {
OBJECT_MAPPER.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
private CosmosClientBuilder cosmosClientBuilder;
@BeforeClass(groups = {"emulator"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"emulator"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"emulator"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"emulator"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"emulator"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"emulator"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"emulator"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"emulator"})
public void supplementalResponseStatisticsList() throws Exception {
    // Record 15 responses: the statistics object keeps all of them internally,
    // but caps the serialized supplementalResponseStatisticsList at 10 entries.
    ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
    for (int i = 0; i < 15; i++) {
        RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
        clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
    }
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    // One mapper is enough for the whole test: ObjectMapper is thread-safe and
    // expensive to construct, so the original's second `new ObjectMapper()` was
    // removed.
    ObjectMapper objectMapper = new ObjectMapper();
    String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
    JsonNode jsonNode = objectMapper.readTree(diagnostics);
    ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
    assertThat(storeResponseStatistics.size()).isEqualTo(15);
    assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
    // Reset the internal list via reflection and verify counts below the cap
    // are serialized unchanged.
    clearStoreResponseStatistics(clientSideRequestStatistics);
    storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    assertThat(storeResponseStatistics.size()).isEqualTo(0);
    for (int i = 0; i < 7; i++) {
        RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
        clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
    }
    storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
    jsonNode = objectMapper.readTree(diagnostics);
    supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
    assertThat(storeResponseStatistics.size()).isEqualTo(7);
    assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
}
@Test(groups = {"emulator"})
public void serializationOnVariousScenarios() {
CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
String diagnostics = cosmosDatabase.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
CosmosContainerResponse containerResponse = this.container.read();
diagnostics = containerResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
TestItem testItem = new TestItem();
testItem.id = "TestId";
testItem.mypk = "TestPk";
CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
testItem.id = "TestId2";
testItem.mypk = "TestPk";
itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
TestItem readTestItem = itemResponse.getItem();
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
InternalObjectNode properties = readItemResponse.getItem();
diagnostics = readItemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
private InternalObjectNode getInternalObjectNode() {
InternalObjectNode internalObjectNode = new InternalObjectNode();
internalObjectNode.setId(UUID.randomUUID().toString());
BridgeInternal.setProperty(internalObjectNode, "mypk", "test");
return internalObjectNode;
}
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
@SuppressWarnings({"unchecked"})
List<ClientSideRequestStatistics.StoreResponseStatistics> list
= (List<ClientSideRequestStatistics.StoreResponseStatistics>) storeResponseStatisticsField.get(requestStatistics);
return list;
}
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
storeResponseStatisticsField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
// Asserts that a gateway-mode request timeline reports every expected event.
// The original asserted "connectionConfigured" twice (copy-paste duplicate);
// the redundant assertion was removed.
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
private void validateTransportRequestTimelineDirect(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"created\"");
assertThat(diagnostics).contains("\"eventName\":\"queued\"");
assertThat(diagnostics).contains("\"eventName\":\"pipelined\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
assertThat(diagnostics).contains("\"eventName\":\"completed\"");
}
public static class TestItem {
public String id;
public String mypk;
public TestItem() {
}
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void supplementalResponseStatisticsList() throws Exception {
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
}
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
String diagnostics = cosmosDatabase.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
CosmosContainerResponse containerResponse = this.container.read();
diagnostics = containerResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
TestItem testItem = new TestItem();
testItem.id = "TestId";
testItem.mypk = "TestPk";
CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
testItem.id = "TestId2";
testItem.mypk = "TestPk";
itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
TestItem readTestItem = itemResponse.getItem();
diagnostics = itemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
InternalObjectNode properties = readItemResponse.getItem();
diagnostics = readItemResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
private InternalObjectNode getInternalObjectNode() {
InternalObjectNode internalObjectNode = new InternalObjectNode();
internalObjectNode.setId(UUID.randomUUID().toString());
BridgeInternal.setProperty(internalObjectNode, "mypk", "test");
return internalObjectNode;
}
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
@SuppressWarnings({"unchecked"})
List<ClientSideRequestStatistics.StoreResponseStatistics> list
= (List<ClientSideRequestStatistics.StoreResponseStatistics>) storeResponseStatisticsField.get(requestStatistics);
return list;
}
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
storeResponseStatisticsField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
// Asserts that a gateway-mode request timeline reports every expected event.
// The original asserted "connectionConfigured" twice (copy-paste duplicate);
// the redundant assertion was removed.
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
private void validateTransportRequestTimelineDirect(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"created\"");
assertThat(diagnostics).contains("\"eventName\":\"queued\"");
assertThat(diagnostics).contains("\"eventName\":\"pipelined\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
assertThat(diagnostics).contains("\"eventName\":\"completed\"");
}
public static class TestItem {
public String id;
public String mypk;
public TestItem() {
}
}
} |
We can make this inline. ```suggestion return ";" + USER_AGENT_SUFFIX; ``` | private static String getUserAgentSuffix() {
String suffix = ";" + USER_AGENT_SUFFIX;
return suffix;
} | return suffix; | private static String getUserAgentSuffix() {
return ";" + USER_AGENT_SUFFIX;
} | class CosmosFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(CosmosFactory.class);
private final CosmosAsyncClient cosmosAsyncClient;
private final String databaseName;
private static final String USER_AGENT_SUFFIX =
Constants.USER_AGENT_SUFFIX + PropertyLoader.getProjectVersion();
/**
* Validate config and initialization
*
* @param cosmosAsyncClient cosmosAsyncClient
* @param databaseName databaseName
*/
public CosmosFactory(CosmosAsyncClient cosmosAsyncClient, String databaseName) {
    // Fail fast on missing required dependencies before storing them.
    Assert.notNull(cosmosAsyncClient, "cosmosAsyncClient must not be null!");
    Assert.notNull(databaseName, "databaseName must not be null!");
    this.cosmosAsyncClient = cosmosAsyncClient;
    this.databaseName = databaseName;
}
/**
* To create a CosmosAsyncClient
*
* @return CosmosAsyncClient
*/
public CosmosAsyncClient getCosmosClient() {
return this.cosmosAsyncClient;
}
/**
* Get Cosmos Database Name
* @return Cosmos Database Name
*/
public String getDatabaseName() {
return this.databaseName;
}
/**
* Create Cosmos Async Client
*
* @param config CosmosClientConfig
* @return CosmosAsyncClient
*/
public static CosmosAsyncClient createCosmosAsyncClient(CosmosClientConfig config) {
final CosmosClientBuilder cosmosClientBuilder = getCosmosClientBuilderFromConfig(config);
return cosmosClientBuilder.buildAsyncClient();
}
/**
 * Applies library defaults to the user-supplied CosmosClientBuilder:
 * enables content response on write and appends the library's user-agent
 * suffix unless the builder's existing suffix already contains it.
 *
 * @param cosmosClientConfig source of the user's CosmosClientBuilder.
 * @return the configured builder.
 */
private static CosmosClientBuilder getCosmosClientBuilderFromConfig(CosmosClientConfig cosmosClientConfig) {
    final CosmosClientBuilder cosmosClientBuilder = cosmosClientConfig.getCosmosClientBuilder();
    cosmosClientBuilder.contentResponseOnWriteEnabled(true);
    final String userAgentSuffixValue = getUserAgentSuffixValue(cosmosClientBuilder);
    String userAgentSuffix = getUserAgentSuffix();
    if (!userAgentSuffixValue.contains(userAgentSuffix)) {
        // Preserve whatever suffix the user configured by appending it.
        userAgentSuffix += userAgentSuffixValue;
    }
    // Reuse the already-configured local builder instead of re-fetching it from
    // the config: re-fetching would silently drop contentResponseOnWriteEnabled
    // if the getter ever returned a different builder instance.
    return cosmosClientBuilder.userAgentSuffix(userAgentSuffix);
}
/**
 * Reads the current userAgentSuffix from the builder via reflection (there is
 * no public getter on CosmosClientBuilder for it).
 *
 * @param cosmosClientBuilder builder to inspect.
 * @return the configured suffix, or "" if it cannot be read.
 */
private static String getUserAgentSuffixValue(CosmosClientBuilder cosmosClientBuilder) {
    // NOTE(review): FieldUtils.getDeclaredField returns null if the field name
    // ever changes in the SDK, which would surface here as an NPE — confirm.
    final Field userAgentSuffix = FieldUtils.getDeclaredField(CosmosClientBuilder.class,
        "userAgentSuffix", true);
    try {
        return (String) userAgentSuffix.get(cosmosClientBuilder);
    } catch (IllegalAccessException e) {
        LOGGER.error("Error occurred while getting userAgentSuffix from CosmosClientBuilder",
            e);
    }
    // Fall back to an empty suffix so callers can append unconditionally.
    return "";
}
} | class CosmosFactory {
private static final Logger LOGGER = LoggerFactory.getLogger(CosmosFactory.class);
private final CosmosAsyncClient cosmosAsyncClient;
private final String databaseName;
private static final String USER_AGENT_SUFFIX =
Constants.USER_AGENT_SUFFIX + PropertyLoader.getProjectVersion();
/**
* Validate config and initialization
*
* @param cosmosAsyncClient cosmosAsyncClient
* @param databaseName databaseName
*/
public CosmosFactory(CosmosAsyncClient cosmosAsyncClient, String databaseName) {
Assert.notNull(cosmosAsyncClient, "cosmosAsyncClient must not be null!");
Assert.notNull(databaseName, "databaseName must not be null!");
this.cosmosAsyncClient = cosmosAsyncClient;
this.databaseName = databaseName;
}
/**
* To create a CosmosAsyncClient
*
* @return CosmosAsyncClient
*/
public CosmosAsyncClient getCosmosAsyncClient() {
return this.cosmosAsyncClient;
}
/**
* Get Cosmos Database Name
* @return Cosmos Database Name
*/
public String getDatabaseName() {
return this.databaseName;
}
/**
* Create Cosmos Async Client
*
* @param config CosmosClientConfig
* @return CosmosAsyncClient
*/
public static CosmosAsyncClient createCosmosAsyncClient(CosmosClientConfig config) {
final CosmosClientBuilder cosmosClientBuilder = getCosmosClientBuilderFromConfig(config);
return cosmosClientBuilder.buildAsyncClient();
}
/**
 * Applies library defaults to the user-supplied CosmosClientBuilder:
 * enables content response on write and appends the library's user-agent
 * suffix unless the builder's existing suffix already contains it.
 *
 * @param cosmosClientConfig source of the user's CosmosClientBuilder.
 * @return the configured builder.
 */
private static CosmosClientBuilder getCosmosClientBuilderFromConfig(CosmosClientConfig cosmosClientConfig) {
    final CosmosClientBuilder cosmosClientBuilder = cosmosClientConfig.getCosmosClientBuilder();
    cosmosClientBuilder.contentResponseOnWriteEnabled(true);
    final String userAgentSuffixValue = getUserAgentSuffixValue(cosmosClientBuilder);
    String userAgentSuffix = getUserAgentSuffix();
    if (!userAgentSuffixValue.contains(userAgentSuffix)) {
        // Preserve whatever suffix the user configured by appending it.
        userAgentSuffix += userAgentSuffixValue;
    }
    // Reuse the already-configured local builder instead of re-fetching it from
    // the config: re-fetching would silently drop contentResponseOnWriteEnabled
    // if the getter ever returned a different builder instance.
    return cosmosClientBuilder.userAgentSuffix(userAgentSuffix);
}
/**
 * Reads the current userAgentSuffix from the builder via reflection (there is
 * no public getter on CosmosClientBuilder for it).
 *
 * @param cosmosClientBuilder builder to inspect.
 * @return the configured suffix, or "" if it cannot be read.
 */
private static String getUserAgentSuffixValue(CosmosClientBuilder cosmosClientBuilder) {
    // NOTE(review): FieldUtils.getDeclaredField returns null if the field name
    // ever changes in the SDK, which would surface here as an NPE — confirm.
    final Field userAgentSuffix = FieldUtils.getDeclaredField(CosmosClientBuilder.class,
        "userAgentSuffix", true);
    try {
        return (String) userAgentSuffix.get(cosmosClientBuilder);
    } catch (IllegalAccessException e) {
        LOGGER.error("Error occurred while getting userAgentSuffix from CosmosClientBuilder",
            e);
    }
    // Fall back to an empty suffix so callers can append unconditionally.
    return "";
}
} |
👍 | private static String getPropertyByName(@NonNull String name, @NonNull String filename) {
final Properties properties = new Properties();
// Load the properties file from the classpath; a null stream means the
// resource does not exist in this artifact, reported to callers as null.
final InputStream inputStream = PropertyLoader.class.getResourceAsStream(filename);
if (inputStream == null) {
    return null;
}
try {
    properties.load(inputStream);
} catch (IOException e) {
    // NOTE(review): load failures are silently swallowed — the caller just
    // sees a missing property. Consider logging; a try-with-resources would
    // also replace this manual close dance.
} finally {
    try {
        inputStream.close();
    } catch (IOException e) {
        // Best-effort close; nothing useful to do on failure.
    }
}
return properties.getProperty(name);
} | final InputStream inputStream = PropertyLoader.class.getResourceAsStream(filename); | private static String getPropertyByName(@NonNull String name, @NonNull String filename) {
final Properties properties = new Properties();
final InputStream inputStream = PropertyLoader.class.getResourceAsStream(filename);
if (inputStream == null) {
return null;
}
try {
properties.load(inputStream);
} catch (IOException e) {
} finally {
try {
inputStream.close();
} catch (IOException e) {
}
}
return properties.getProperty(name);
} | class PropertyLoader {
private static final String PROJECT_PROPERTY_FILE = "/META-INF/project.properties";
private static final String APPLICATION_PROPERTY_FILE = "/application.properties";
private static final String APPLICATION_YML_FILE = "/application.yml";
private PropertyLoader() {
}
/**
* Get project version from /META-INF/project.properties
*
* @return String project version
*/
public static String getProjectVersion() {
return getPropertyByName("project.version", PROJECT_PROPERTY_FILE);
}
} | class PropertyLoader {
private static final String PROJECT_PROPERTY_FILE = "/META-INF/project.properties";
private static final String APPLICATION_PROPERTY_FILE = "/application.properties";
private static final String APPLICATION_YML_FILE = "/application.yml";
private PropertyLoader() {
}
/**
* Get project version from /META-INF/project.properties
*
* @return String project version
*/
public static String getProjectVersion() {
return getPropertyByName("project.version", PROJECT_PROPERTY_FILE);
}
} |
this class is getting instantiated in a for loop in your benchmark, meaning the logger initialization will be called per for loop iteration. logger should be static to avoid initialization cost per loop iteration. | public BenchmarkRequestSubscriber(Meter successMeter, Meter failureMeter, Semaphore concurrencyControlSemaphore, AtomicLong count) {
this.successMeter = successMeter;
this.failureMeter = failureMeter;
this.concurrencyControlSemaphore = concurrencyControlSemaphore;
this.count = count;
logger = LoggerFactory.getLogger(this.getClass());
} | logger = LoggerFactory.getLogger(this.getClass()); | public BenchmarkRequestSubscriber(Meter successMeter, Meter failureMeter, Semaphore concurrencyControlSemaphore, AtomicLong count) {
this.successMeter = successMeter;
this.failureMeter = failureMeter;
this.concurrencyControlSemaphore = concurrencyControlSemaphore;
this.count = count;
} | class BenchmarkRequestSubscriber<T> extends BaseSubscriber<T> {
// NOTE(review): non-static logger field means one logger lookup per subscriber
// instance; subscribers appear to be created per operation — consider a
// static logger to avoid the per-instance initialization cost.
final Logger logger;
private Meter successMeter;   // marked once in hookOnComplete
private Meter failureMeter;   // marked once in hookOnError
private Semaphore concurrencyControlSemaphore; // released when the operation settles
private AtomicLong count;     // shared completed-operation counter; also the wait/notify monitor
Timer.Context context;        // latency timer context, stopped on completion/error; assigned externally
@Override
protected void hookOnSubscribe(Subscription subscription) {
    super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
    logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
// Records success, frees a concurrency slot, and wakes any thread waiting on count.
@Override
protected void hookOnComplete() {
    context.stop();
    successMeter.mark();
    concurrencyControlSemaphore.release();
    synchronized (count) {
        count.incrementAndGet();
        count.notify();
    }
}
// Records and logs failure, frees a concurrency slot, and wakes any thread waiting on count.
@Override
protected void hookOnError(Throwable throwable) {
    context.stop();
    failureMeter.mark();
    logger.error("Encountered failure {} on thread {}" ,
        throwable.getMessage(), Thread.currentThread().getName(), throwable);
    concurrencyControlSemaphore.release();
    synchronized (count) {
        count.incrementAndGet();
        count.notify();
    }
}
} | class BenchmarkRequestSubscriber<T> extends BaseSubscriber<T> {
final static Logger logger = LoggerFactory.getLogger(BenchmarkRequestSubscriber.class);
private Meter successMeter;
private Meter failureMeter;
private Semaphore concurrencyControlSemaphore;
private AtomicLong count;
public Timer.Context context;
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
@Override
protected void hookOnComplete() {
context.stop();
successMeter.mark();
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
} |
done | public BenchmarkRequestSubscriber(Meter successMeter, Meter failureMeter, Semaphore concurrencyControlSemaphore, AtomicLong count) {
this.successMeter = successMeter;
this.failureMeter = failureMeter;
this.concurrencyControlSemaphore = concurrencyControlSemaphore;
this.count = count;
logger = LoggerFactory.getLogger(this.getClass());
} | logger = LoggerFactory.getLogger(this.getClass()); | public BenchmarkRequestSubscriber(Meter successMeter, Meter failureMeter, Semaphore concurrencyControlSemaphore, AtomicLong count) {
this.successMeter = successMeter;
this.failureMeter = failureMeter;
this.concurrencyControlSemaphore = concurrencyControlSemaphore;
this.count = count;
} | class BenchmarkRequestSubscriber<T> extends BaseSubscriber<T> {
final Logger logger;
private Meter successMeter;
private Meter failureMeter;
private Semaphore concurrencyControlSemaphore;
private AtomicLong count;
Timer.Context context;
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
@Override
protected void hookOnComplete() {
context.stop();
successMeter.mark();
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
} | class BenchmarkRequestSubscriber<T> extends BaseSubscriber<T> {
final static Logger logger = LoggerFactory.getLogger(BenchmarkRequestSubscriber.class);
private Meter successMeter;
private Meter failureMeter;
private Semaphore concurrencyControlSemaphore;
private AtomicLong count;
public Timer.Context context;
@Override
protected void hookOnSubscribe(Subscription subscription) {
super.hookOnSubscribe(subscription);
}
@Override
protected void hookOnNext(T value) {
logger.debug("hookOnNext: {}, count:{}", value, count.get());
}
@Override
protected void hookOnComplete() {
context.stop();
successMeter.mark();
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
@Override
protected void hookOnError(Throwable throwable) {
context.stop();
failureMeter.mark();
logger.error("Encountered failure {} on thread {}" ,
throwable.getMessage(), Thread.currentThread().getName(), throwable);
concurrencyControlSemaphore.release();
synchronized (count) {
count.incrementAndGet();
count.notify();
}
}
} |
as a code style, we should try to either 1. have all args on the same line 2. or if there are many, have one arg per line. Please try to follow that here and in other new code. | public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter, concurrencyControlSemaphore, count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter, concurrencyControlSemaphore, count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter, concurrencyControlSemaphore, count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | readFailureMeter, concurrencyControlSemaphore, count); | public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter,
concurrencyControlSemaphore,
count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter,
concurrencyControlSemaphore,
count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter,
concurrencyControlSemaphore,
count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
}
/**
 * Cleans up resources created for this run: drops the whole database if this
 * test created it, otherwise drops only the containers it created, then
 * closes the client.
 */
public void shutdown() {
    if (this.databaseCreated) {
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
    } else if (containerToClearAfterTest.size() > 0) {
        for (String id : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(id).delete().block();
            logger.info("Deleted temporary collection {} created for this test", id);
        }
    }
    cosmosClient.close();
}
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
/**
 * Parses a "read,write,query" percentage triple (e.g. "90,9,1"). The three
 * values must be integers summing to 100; on any violation the current
 * (default) percentages are kept and a warning is logged.
 *
 * @param readWriteQueryPct comma-separated read/write/query percentages.
 */
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
    String[] parts = readWriteQueryPct.split(",");
    if (parts.length == 3) {
        try {
            // parseInt avoids the boxing that Integer.valueOf incurred.
            int read = Integer.parseInt(parts[0]);
            int write = Integer.parseInt(parts[1]);
            int query = Integer.parseInt(parts[2]);
            if (read + write + query == 100) {
                readPct = read;
                writePct = write;
                queryPct = query;
                return;
            }
        } catch (NumberFormatException ex) {
            // Fall through to the shared warning below.
        }
    }
    // Single warning path replaces the three duplicated warn calls.
    logger.warn(PERCENT_PARSING_ERROR, readPct, writePct, queryPct);
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct;
private int writePct;
private int queryPct;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
}
public void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (containerToClearAfterTest.size() > 0) {
for (String id : containerToClearAfterTest) {
cosmosAsyncDatabase.getContainer(id).delete().block();
logger.info("Deleted temporary collection {} created for this test", id);
}
}
cosmosClient.close();
}
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} catch (NumberFormatException ex) {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
    // Reuses the configured database if it already exists; otherwise creates it
    // and records ownership so shutdown() knows to delete it afterwards.
    // Note: consistently uses the cfg parameter instead of the previous mix of
    // cfg and this.configuration (they refer to the same object, but the mixed
    // usage was confusing and error-prone).
    try {
        cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
        cosmosAsyncDatabase.read().block();
    } catch (CosmosException e) {
        if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
            cosmosClient.createDatabase(cfg.getDatabaseId()).block();
            cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
            logger.info("Database {} is created for this test", cfg.getDatabaseId());
            databaseCreated = true;
        } else {
            // Anything other than "not found" is a real failure.
            throw e;
        }
    }
    // Always provision at least one collection.
    int numberOfCollection = Math.max(cfg.getNumberOfCollectionForCtl(), 1);
    for (int i = 1; i <= numberOfCollection; i++) {
        // Build the suffixed collection name once per iteration.
        String collectionId = cfg.getCollectionId() + "_" + i;
        try {
            CosmosAsyncContainer cosmosAsyncContainer = cosmosAsyncDatabase.getContainer(collectionId);
            cosmosAsyncContainer.read().block();
            containers.add(cosmosAsyncContainer);
        } catch (CosmosException e) {
            if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                cosmosAsyncDatabase.createContainer(
                    collectionId,
                    Configuration.DEFAULT_PARTITION_KEY_PATH,
                    ThroughputProperties.createManualThroughput(cfg.getThroughput())
                ).block();
                CosmosAsyncContainer cosmosAsyncContainer = cosmosAsyncDatabase.getContainer(collectionId);
                logger.info("Collection {} is created for this test", collectionId);
                containers.add(cosmosAsyncContainer);
                // Only collections created by this run are cleaned up after the test.
                containerToClearAfterTest.add(cosmosAsyncContainer.getId());
            } else {
                throw e;
            }
        }
    }
}
private void initializeReporter(Configuration configuration) {
    // Selects the metrics sink by configuration precedence:
    // Graphite endpoint > CSV reporting directory > console.
    if (configuration.getGraphiteEndpoint() != null) {
        Graphite graphiteSink = new Graphite(new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort()));
        reporter = GraphiteReporter.forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build(graphiteSink);
        return;
    }
    if (configuration.getReportingDirectory() != null) {
        reporter = CsvReporter.forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build(configuration.getReportingDirectory());
        return;
    }
    // Default: human-readable console output.
    reporter = ConsoleReporter.forRegistry(metricsRegistry)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .convertRatesTo(TimeUnit.SECONDS)
        .build();
}
} |
if the input config is not valid, shouldn't we log error and terminate? otherwise invalid config may go unnoticed. I don't see you throwing any exception on invalid config. | private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} catch (NumberFormatException ex) {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct, writePct,
queryPct);
}
} | } | private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} catch (NumberFormatException ex) {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
public AsyncCtlWorkload(Configuration cfg) {
    // Wires up the async Cosmos client, validates the workload mix, provisions
    // database/collections, pre-populates documents for the read workload and
    // sets up metrics reporting.
    cosmosClient = buildClient(cfg);
    configuration = cfg;
    logger = LoggerFactory.getLogger(this.getClass());
    // Validate the read/write/query mix BEFORE any expensive resource setup so
    // an invalid configuration is reported early instead of after databases and
    // thousands of documents have already been created.
    parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
    createDatabaseAndContainers(configuration);
    // Partition key property name, e.g. "pk" for the path "/pk".
    partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
        .getPaths().iterator().next().split("/")[1];
    concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
    logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
    dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
    createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
    if (configuration.isEnableJvmStats()) {
        metricsRegistry.register("gc", new GarbageCollectorMetricSet());
        metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
        metricsRegistry.register("memory", new MemoryUsageGaugeSet());
    }
    initializeReporter(cfg);
    MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    registry = configuration.getGraphiteMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    prefixUuidForCreate = UUID.randomUUID().toString();
    random = new Random();
}

/**
 * Builds the async Cosmos client from the benchmark configuration. Direct mode
 * uses the SDK's default connection config; gateway mode honours the configured
 * connection pool size.
 */
private static CosmosAsyncClient buildClient(Configuration cfg) {
    CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
        .endpoint(cfg.getServiceEndpoint())
        .key(cfg.getMasterKey())
        .consistencyLevel(cfg.getConsistencyLevel())
        .contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
    if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
        cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
    } else {
        GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
        gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
        cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
    }
    return cosmosClientBuilder.buildAsyncClient();
}
public void shutdown() {
    // Cleans up only what this run created: a database we made is dropped
    // wholesale; otherwise just the collections we added. The client is always
    // closed at the end.
    if (this.databaseCreated) {
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
    } else if (!containerToClearAfterTest.isEmpty()) {
        for (String collectionId : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(collectionId).delete().block();
            logger.info("Deleted temporary collection {} created for this test", collectionId);
        }
    }
    cosmosClient.close();
}
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
    // Issues a single async operation of the given type against one of the
    // containers (round-robin by operation number) and hands the resulting
    // stream to the supplied subscriber.
    Flux<? extends Object> obs;
    CosmosAsyncContainer container = containers.get((int) i % containers.size());
    if (type.equals(OperationType.Create)) {
        PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
            configuration.getDocumentDataFieldCount());
        obs = container.createItem(data).flux();
    } else if (type.equals(OperationType.Query)) {
        CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
        String sqlQuery = "Select top 100 * from c order by c._ts";
        obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
    } else {
        // Read a random pre-created document. Bound the index by the actual
        // list size: the previous hard-coded nextInt(1000) threw
        // IndexOutOfBoundsException whenever fewer than 1000 documents were
        // pre-populated. (Also drops an unused RequestOptions local that was
        // configured but never passed to readItem.)
        List<PojoizedJson> candidates = docsToRead.get(container.getId());
        PojoizedJson target = candidates.get(random.nextInt(candidates.size()));
        // For pre-created documents the id doubles as the partition key value.
        obs = container.readItem(target.getId(),
            new PartitionKey(target.getId()),
            PojoizedJson.class)
            .flux();
    }
    // Throttle the number of in-flight operations; the subscriber releases the permit.
    concurrencyControlSemaphore.acquire();
    obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
public void run() throws Exception {
// Runs the benchmark: registers success/failure meters and latency timers,
// then issues read/write/query operations in the configured percentage mix
// until BenchmarkHelper.shouldContinue says to stop.
// NOTE(review): the six meter-name string literals below are truncated
// (unterminated) — presumably success/failure counter names lost in
// extraction; restore them from version control before compiling.
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
// Completed-operation counter; the subscribers increment it and notify on it.
AtomicLong count = new AtomicLong(0);
long i;
// Operations are dealt out by i % 100: [0, readPct) -> read,
// [readPct, readPct + writePct) -> write, the remainder -> query.
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter, concurrencyControlSemaphore, count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter, concurrencyControlSemaphore, count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter, concurrencyControlSemaphore, count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
// Block until every issued operation has completed.
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
// Flush a final metrics report before shutting the reporter down.
reporter.report();
reporter.close();
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct;
private int writePct;
private int queryPct;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
}
public void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (containerToClearAfterTest.size() > 0) {
for (String id : containerToClearAfterTest) {
cosmosAsyncDatabase.getContainer(id).delete().block();
logger.info("Deleted temporary collection {} created for this test", id);
}
}
cosmosClient.close();
}
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter,
concurrencyControlSemaphore,
count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter,
concurrencyControlSemaphore,
count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter,
concurrencyControlSemaphore,
count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} |
so on query, we just run an orderby by query and we expect it to not fail, but we don't validate the result. am I right? | private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
} | obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10); | private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
}
public void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (containerToClearAfterTest.size() > 0) {
for (String id : containerToClearAfterTest) {
cosmosAsyncDatabase.getContainer(id).delete().block();
logger.info("Deleted temporary collection {} created for this test", id);
}
}
cosmosClient.close();
}
public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter, concurrencyControlSemaphore, count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter, concurrencyControlSemaphore, count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter, concurrencyControlSemaphore, count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
/**
 * Parses the "read,write,query" percentage string into readPct/writePct/queryPct.
 *
 * Previously this method only logged a warning on bad input (passing three
 * arguments to a message constant) and silently kept the field defaults, which
 * hid misconfiguration. It now fails fast, matching the stricter variant of
 * this method elsewhere in this file.
 *
 * @param readWriteQueryPct comma-separated percentages that must sum to 100.
 * @throws IllegalArgumentException if the string is malformed, non-numeric,
 *         or the three values do not add up to 100.
 */
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
    String[] parts = readWriteQueryPct.split(",");
    if (parts.length != 3) {
        throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
    }
    int read;
    int write;
    int query;
    try {
        read = Integer.parseInt(parts[0]);
        write = Integer.parseInt(parts[1]);
        query = Integer.parseInt(parts[2]);
    } catch (NumberFormatException ex) {
        throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
    }
    if (read + write + query != 100) {
        throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
    }
    readPct = read;
    writePct = write;
    queryPct = query;
}
/**
 * Seeds every container with documents so the read and query workloads have
 * data to operate on; the created documents are cached in docsToRead.
 */
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
    for (CosmosAsyncContainer container : containers) {
        List<Flux<PojoizedJson>> creations = new ArrayList<>(numberOfPreCreatedDocuments);
        for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
            PojoizedJson newDoc = BenchmarkHelper.generateDocument(
                UUID.randomUUID().toString(),
                dataFieldValue,
                partitionKey,
                configuration.getDocumentDataFieldCount());
            // Defer the insert; all creations are merged and awaited below.
            creations.add(container.createItem(newDoc).map(resp -> resp.getItem()).flux());
        }
        logger.info("Finished pre-populating {} documents for container {}",
            numberOfPreCreatedDocuments, container.getId());
        // Run up to 100 inserts concurrently and keep the resulting documents.
        docsToRead.put(container.getId(),
            Flux.merge(Flux.fromIterable(creations), 100).collectList().block());
    }
}
/**
 * Ensures the target database and the configured number of containers exist,
 * creating any that are missing. Remembers what was created so shutdown()
 * knows whether to drop the whole database or only individual containers.
 *
 * Improvements: the container id is built once per iteration instead of five
 * times inline, and the method consistently uses the {@code cfg} parameter
 * (it is the same object the constructor stores as {@code configuration}).
 *
 * @param cfg benchmark configuration providing database/collection ids.
 */
private void createDatabaseAndContainers(Configuration cfg) {
    try {
        cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
        cosmosAsyncDatabase.read().block();
    } catch (CosmosException e) {
        if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
            // Database is missing: create it and flag it for deletion at shutdown.
            cosmosClient.createDatabase(cfg.getDatabaseId()).block();
            cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
            logger.info("Database {} is created for this test", cfg.getDatabaseId());
            databaseCreated = true;
        } else {
            throw e;
        }
    }
    // Always use at least one container.
    int numberOfCollection = Math.max(cfg.getNumberOfCollectionForCtl(), 1);
    for (int i = 1; i <= numberOfCollection; i++) {
        String containerId = cfg.getCollectionId() + "_" + i;
        try {
            CosmosAsyncContainer cosmosAsyncContainer = cosmosAsyncDatabase.getContainer(containerId);
            cosmosAsyncContainer.read().block();
            containers.add(cosmosAsyncContainer);
        } catch (CosmosException e) {
            if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Container is missing: create it and remember it for cleanup.
                cosmosAsyncDatabase.createContainer(
                    containerId,
                    Configuration.DEFAULT_PARTITION_KEY_PATH,
                    ThroughputProperties.createManualThroughput(cfg.getThroughput())
                ).block();
                CosmosAsyncContainer cosmosAsyncContainer = cosmosAsyncDatabase.getContainer(containerId);
                logger.info("Collection {} is created for this test", containerId);
                containers.add(cosmosAsyncContainer);
                containerToClearAfterTest.add(cosmosAsyncContainer.getId());
            } else {
                throw e;
            }
        }
    }
}
/**
 * Picks the metrics sink for this run: a Graphite endpoint when configured,
 * otherwise a CSV directory when configured, otherwise the console.
 */
private void initializeReporter(Configuration configuration) {
    if (configuration.getGraphiteEndpoint() != null) {
        Graphite graphite = new Graphite(new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort()));
        this.reporter = GraphiteReporter
            .forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .filter(MetricFilter.ALL)
            .build(graphite);
        return;
    }
    if (configuration.getReportingDirectory() != null) {
        this.reporter = CsvReporter
            .forRegistry(metricsRegistry)
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .build(configuration.getReportingDirectory());
        return;
    }
    this.reporter = ConsoleReporter
        .forRegistry(metricsRegistry)
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .build();
}
} | class AsyncCtlWorkload {
// Error prefix used when the "read,write,query" percentage string is invalid.
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
// Run-unique id prefix so documents created by this run do not collide.
private final String prefixUuidForCreate;
// Random payload written into every generated document field.
private final String dataFieldValue;
// Name of the partition-key property, derived from the container definition.
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
// Pre-created documents keyed by container id; the read workload samples these.
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
// Caps the number of concurrently in-flight operations.
private final Semaphore concurrencyControlSemaphore;
private final Random random;
// Dropwizard metrics: per-operation latency timers and success/failure meters.
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
// Containers created by this run; deleted in shutdown() unless the whole DB is.
private List<String> containerToClearAfterTest = new ArrayList<>();
// True when this run created the database (shutdown() then drops it entirely).
private boolean databaseCreated;
// Workload mix percentages; parsedReadWriteQueryPct requires them to sum to 100.
private int readPct;
private int writePct;
private int queryPct;
/**
 * Builds the Cosmos async client from the configuration, validates the
 * workload mix, ensures database/containers exist, pre-populates documents,
 * and wires up metrics/telemetry reporting.
 *
 * @param cfg benchmark configuration (endpoint, keys, workload shape).
 */
public AsyncCtlWorkload(Configuration cfg) {
    CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
        .endpoint(cfg.getServiceEndpoint())
        .key(cfg.getMasterKey())
        .consistencyLevel(cfg.getConsistencyLevel())
        .contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
    if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
        cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
    } else {
        GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
        gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
        cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
    }
    cosmosClient = cosmosClientBuilder.buildAsyncClient();
    configuration = cfg;
    logger = LoggerFactory.getLogger(this.getClass());
    // Validate the mix up front so a bad configuration fails before any
    // databases or documents are created.
    parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
    createDatabaseAndContainers(configuration);
    // Partition-key property name, e.g. "/pk" -> "pk".
    partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
        .getPaths().iterator().next().split("/")[1];
    concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
    logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
    dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
    createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
    if (configuration.isEnableJvmStats()) {
        // Optional JVM-level gauges (GC, threads, memory) in the same registry.
        metricsRegistry.register("gc", new GarbageCollectorMetricSet());
        metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
        metricsRegistry.register("memory", new MemoryUsageGaugeSet());
    }
    initializeReporter(cfg);
    MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    registry = configuration.getGraphiteMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    prefixUuidForCreate = UUID.randomUUID().toString();
    random = new Random();
}
/**
 * Deletes any database/containers this run created, then closes the client.
 */
public void shutdown() {
    if (this.databaseCreated) {
        // We created the whole database; dropping it removes all containers too.
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
    } else if (containerToClearAfterTest.size() > 0) {
        // Database pre-existed; only remove the containers this run added.
        for (String id : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(id).delete().block();
            logger.info("Deleted temporary collection {} created for this test", id);
        }
    }
    cosmosClient.close();
}
/**
 * Drives the benchmark loop: registers metrics, issues operations according
 * to the read/write/query mix, then waits for all in-flight work to finish.
 *
 * @throws Exception if interrupted while waiting for completion.
 */
public void run() throws Exception {
    // NOTE(review): the meter-name string literals on the next six lines are
    // truncated in this copy (each line ends at an open quote) -- recover the
    // full names from the original source before compiling.
    readSuccessMeter = metricsRegistry.meter("
    readFailureMeter = metricsRegistry.meter("
    writeSuccessMeter = metricsRegistry.meter("
    writeFailureMeter = metricsRegistry.meter("
    querySuccessMeter = metricsRegistry.meter("
    queryFailureMeter = metricsRegistry.meter("
    readLatency = metricsRegistry.timer("Read Latency");
    writeLatency = metricsRegistry.timer("Write Latency");
    queryLatency = metricsRegistry.timer("Query Latency");
    reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
    long startTime = System.currentTimeMillis();
    // Completed-operation counter; subscribers increment it and notify waiters.
    AtomicLong count = new AtomicLong(0);
    long i;
    int writeRange = readPct + writePct;
    for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
        // [0, readPct) -> read, [readPct, readPct+writePct) -> create, rest -> query.
        int index = (int) i % 100;
        if (index < readPct) {
            BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
                readFailureMeter,
                concurrencyControlSemaphore,
                count);
            readSubscriber.context = readLatency.time();
            performWorkload(readSubscriber, OperationType.Read, i);
        } else if (index < writeRange) {
            BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
                writeFailureMeter,
                concurrencyControlSemaphore,
                count);
            writeSubscriber.context = writeLatency.time();
            performWorkload(writeSubscriber, OperationType.Create, i);
        } else {
            BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
                queryFailureMeter,
                concurrencyControlSemaphore,
                count);
            querySubscriber.context = queryLatency.time();
            performWorkload(querySubscriber, OperationType.Query, i);
        }
    }
    // Block until every issued operation has completed.
    synchronized (count) {
        while (count.get() < i) {
            count.wait();
        }
    }
    long endTime = System.currentTimeMillis();
    // NOTE(review): logs the configured operation count, which may differ from
    // the number actually issued (i) on duration-bounded runs -- confirm.
    logger.info("[{}] operations performed in [{}] seconds.",
        configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
    reporter.report();
    reporter.close();
}
/**
 * Parses the "read,write,query" percentage string into the mix fields.
 *
 * @param readWriteQueryPct comma-separated percentages that must sum to 100.
 * @throws IllegalArgumentException if the string is malformed, non-numeric,
 *         or the values do not add up to 100.
 */
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
    String[] parts = readWriteQueryPct.split(",");
    if (parts.length != 3) {
        throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
    }
    try {
        int read = Integer.parseInt(parts[0]);
        int write = Integer.parseInt(parts[1]);
        int query = Integer.parseInt(parts[2]);
        if (read + write + query != 100) {
            throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
        }
        readPct = read;
        writePct = write;
        queryPct = query;
    } catch (NumberFormatException ex) {
        throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
    }
}
/**
 * Seeds every container with documents so read/query workloads have data;
 * the created documents are cached in docsToRead.
 */
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
    for (CosmosAsyncContainer container : containers) {
        ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
        for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
            String uId = UUID.randomUUID().toString();
            PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
                dataFieldValue,
                partitionKey,
                configuration.getDocumentDataFieldCount());
            // Deferred insert; all creations are merged and awaited below.
            Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
                PojoizedJson x =
                    resp.getItem();
                return x;
            }).flux();
            createDocumentObservables.add(obs);
        }
        logger.info("Finished pre-populating {} documents for container {}",
            numberOfPreCreatedDocuments, container.getId());
        // Run up to 100 inserts concurrently and keep the created documents.
        docsToRead.put(container.getId(),
            Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
    }
}
/**
 * Ensures the target database and containers exist, creating any that are
 * missing; records what was created so shutdown() knows what to clean up.
 *
 * @param cfg benchmark configuration providing database/collection ids.
 */
private void createDatabaseAndContainers(Configuration cfg) {
    try {
        cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
        cosmosAsyncDatabase.read().block();
    } catch (CosmosException e) {
        if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
            // Database missing: create it and flag it for deletion at shutdown.
            cosmosClient.createDatabase(cfg.getDatabaseId()).block();
            cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
            logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
            databaseCreated = true;
        } else {
            throw e;
        }
    }
    // Always use at least one container.
    int numberOfCollection = cfg.getNumberOfCollectionForCtl();
    if (numberOfCollection < 1) {
        numberOfCollection = 1;
    }
    for (int i = 1; i <= numberOfCollection; i++) {
        try {
            CosmosAsyncContainer cosmosAsyncContainer =
                cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
            cosmosAsyncContainer.read().block();
            containers.add(cosmosAsyncContainer);
        } catch (CosmosException e) {
            if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Container missing: create it and remember it for cleanup.
                cosmosAsyncDatabase.createContainer(
                    this.configuration.getCollectionId() + "_" + i,
                    Configuration.DEFAULT_PARTITION_KEY_PATH,
                    ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
                ).block();
                CosmosAsyncContainer cosmosAsyncContainer =
                    cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
                logger.info("Collection {} is created for this test",
                    this.configuration.getCollectionId() + "_" + i);
                containers.add(cosmosAsyncContainer);
                containerToClearAfterTest.add(cosmosAsyncContainer.getId());
            } else {
                throw e;
            }
        }
    }
}
/**
 * Picks the metrics sink: Graphite when configured, then CSV directory,
 * otherwise the console.
 */
private void initializeReporter(Configuration configuration) {
    if (configuration.getGraphiteEndpoint() != null) {
        final Graphite graphite = new Graphite(new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort()));
        reporter = GraphiteReporter.forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build(graphite);
    } else if (configuration.getReportingDirectory() != null) {
        reporter = CsvReporter.forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build(configuration.getReportingDirectory());
    } else {
        reporter = ConsoleReporter.forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build();
    }
}
} |
Yes i was printing warning and using default, but throwing exception would be more clear. Done | private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} catch (NumberFormatException ex) {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct, writePct,
queryPct);
}
} | } | private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} catch (NumberFormatException ex) {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} | class AsyncCtlWorkload {
// Warning template (SLF4J placeholders) logged when the mix string is invalid.
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
// Run-unique id prefix so documents created by this run do not collide.
private final String prefixUuidForCreate;
// Random payload written into every generated document field.
private final String dataFieldValue;
// Name of the partition-key property, derived from the container definition.
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
// Pre-created documents keyed by container id; the read workload samples these.
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
// Caps the number of concurrently in-flight operations.
private final Semaphore concurrencyControlSemaphore;
private final Random random;
// Dropwizard metrics: per-operation latency timers and success/failure meters.
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
// Containers created by this run; deleted in shutdown() unless the whole DB is.
private List<String> containerToClearAfterTest = new ArrayList<>();
// True when this run created the database (shutdown() then drops it entirely).
private boolean databaseCreated;
// Workload mix percentages; defaults used when parsing the configured mix fails.
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
/**
 * Builds the Cosmos async client, ensures database/containers exist,
 * pre-populates documents, wires up metrics/telemetry, and finally parses
 * the workload mix (field defaults apply until then).
 *
 * @param cfg benchmark configuration (endpoint, keys, workload shape).
 */
public AsyncCtlWorkload(Configuration cfg) {
    CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
        .endpoint(cfg.getServiceEndpoint())
        .key(cfg.getMasterKey())
        .consistencyLevel(cfg.getConsistencyLevel())
        .contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
    if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
        cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
    } else {
        GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
        gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
        cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
    }
    cosmosClient = cosmosClientBuilder.buildAsyncClient();
    configuration = cfg;
    logger = LoggerFactory.getLogger(this.getClass());
    createDatabaseAndContainers(configuration);
    // Partition-key property name, e.g. "/pk" -> "pk".
    partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
        .getPaths().iterator().next().split("/")[1];
    concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
    logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
    dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
    createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
    if (configuration.isEnableJvmStats()) {
        // Optional JVM-level gauges (GC, threads, memory) in the same registry.
        metricsRegistry.register("gc", new GarbageCollectorMetricSet());
        metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
        metricsRegistry.register("memory", new MemoryUsageGaugeSet());
    }
    initializeReporter(cfg);
    MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    registry = configuration.getGraphiteMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    prefixUuidForCreate = UUID.randomUUID().toString();
    random = new Random();
    // Parsed last; on failure the field defaults (90/9/1) remain in effect.
    parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
}
/**
 * Deletes any database/containers this run created, then closes the client.
 */
public void shutdown() {
    if (this.databaseCreated) {
        // We created the whole database; dropping it removes all containers too.
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
    } else if (containerToClearAfterTest.size() > 0) {
        // Database pre-existed; only remove the containers this run added.
        for (String id : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(id).delete().block();
            logger.info("Deleted temporary collection {} created for this test", id);
        }
    }
    cosmosClient.close();
}
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
/**
 * Drives the benchmark loop: registers metrics, issues operations according
 * to the read/write/query mix, then waits for all in-flight work to finish.
 *
 * @throws Exception if interrupted while waiting for completion.
 */
public void run() throws Exception {
    // NOTE(review): the meter-name string literals on the next six lines are
    // truncated in this copy (each line ends at an open quote) -- recover the
    // full names from the original source before compiling.
    readSuccessMeter = metricsRegistry.meter("
    readFailureMeter = metricsRegistry.meter("
    writeSuccessMeter = metricsRegistry.meter("
    writeFailureMeter = metricsRegistry.meter("
    querySuccessMeter = metricsRegistry.meter("
    queryFailureMeter = metricsRegistry.meter("
    readLatency = metricsRegistry.timer("Read Latency");
    writeLatency = metricsRegistry.timer("Write Latency");
    queryLatency = metricsRegistry.timer("Query Latency");
    reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
    long startTime = System.currentTimeMillis();
    // Completed-operation counter; subscribers increment it and notify waiters.
    AtomicLong count = new AtomicLong(0);
    long i;
    int writeRange = readPct + writePct;
    for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
        // [0, readPct) -> read, [readPct, readPct+writePct) -> create, rest -> query.
        int index = (int) i % 100;
        if (index < readPct) {
            BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
                readFailureMeter, concurrencyControlSemaphore, count);
            readSubscriber.context = readLatency.time();
            performWorkload(readSubscriber, OperationType.Read, i);
        } else if (index < writeRange) {
            BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
                writeFailureMeter, concurrencyControlSemaphore, count);
            writeSubscriber.context = writeLatency.time();
            performWorkload(writeSubscriber, OperationType.Create, i);
        } else {
            BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
                queryFailureMeter, concurrencyControlSemaphore, count);
            querySubscriber.context = queryLatency.time();
            performWorkload(querySubscriber, OperationType.Query, i);
        }
    }
    // Block until every issued operation has completed.
    synchronized (count) {
        while (count.get() < i) {
            count.wait();
        }
    }
    long endTime = System.currentTimeMillis();
    // NOTE(review): logs the configured operation count, which may differ from
    // the number actually issued (i) on duration-bounded runs -- confirm.
    logger.info("[{}] operations performed in [{}] seconds.",
        configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
    reporter.report();
    reporter.close();
}
/**
 * Seeds every container with documents so read/query workloads have data;
 * the created documents are cached in docsToRead.
 */
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
    for (CosmosAsyncContainer container : containers) {
        ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
        for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
            String uId = UUID.randomUUID().toString();
            PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId, dataFieldValue, partitionKey,
                configuration.getDocumentDataFieldCount());
            // Deferred insert; all creations are merged and awaited below.
            Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
                PojoizedJson x =
                    resp.getItem();
                return x;
            }).flux();
            createDocumentObservables.add(obs);
        }
        logger.info("Finished pre-populating {} documents for container {}",
            numberOfPreCreatedDocuments, container.getId());
        // Run up to 100 inserts concurrently and keep the created documents.
        docsToRead.put(container.getId(),
            Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
    }
}
/**
 * Ensures the target database and containers exist, creating any that are
 * missing; records what was created so shutdown() knows what to clean up.
 *
 * @param cfg benchmark configuration providing database/collection ids.
 */
private void createDatabaseAndContainers(Configuration cfg) {
    try {
        cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
        cosmosAsyncDatabase.read().block();
    } catch (CosmosException e) {
        if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
            // Database missing: create it and flag it for deletion at shutdown.
            cosmosClient.createDatabase(cfg.getDatabaseId()).block();
            cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
            logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
            databaseCreated = true;
        } else {
            throw e;
        }
    }
    // Always use at least one container.
    int numberOfCollection = cfg.getNumberOfCollectionForCtl();
    if (numberOfCollection < 1) {
        numberOfCollection = 1;
    }
    for (int i = 1; i <= numberOfCollection; i++) {
        try {
            CosmosAsyncContainer cosmosAsyncContainer =
                cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
            cosmosAsyncContainer.read().block();
            containers.add(cosmosAsyncContainer);
        } catch (CosmosException e) {
            if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Container missing: create it and remember it for cleanup.
                cosmosAsyncDatabase.createContainer(
                    this.configuration.getCollectionId() + "_" + i,
                    Configuration.DEFAULT_PARTITION_KEY_PATH,
                    ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
                ).block();
                CosmosAsyncContainer cosmosAsyncContainer =
                    cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
                logger.info("Collection {} is created for this test",
                    this.configuration.getCollectionId() + "_" + i);
                containers.add(cosmosAsyncContainer);
                containerToClearAfterTest.add(cosmosAsyncContainer.getId());
            } else {
                throw e;
            }
        }
    }
}
/**
 * Picks the metrics sink: Graphite when configured, then CSV directory,
 * otherwise the console.
 */
private void initializeReporter(Configuration configuration) {
    if (configuration.getGraphiteEndpoint() != null) {
        final Graphite graphite = new Graphite(new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort()));
        reporter = GraphiteReporter.forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build(graphite);
    } else if (configuration.getReportingDirectory() != null) {
        reporter = CsvReporter.forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build(configuration.getReportingDirectory());
    } else {
        reporter = ConsoleReporter.forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build();
    }
}
} | class AsyncCtlWorkload {
// Error prefix used when the "read,write,query" percentage string is invalid.
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
// Run-unique id prefix so documents created by this run do not collide.
private final String prefixUuidForCreate;
// Random payload written into every generated document field.
private final String dataFieldValue;
// Name of the partition-key property, derived from the container definition.
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
// Pre-created documents keyed by container id; the read workload samples these.
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
// Caps the number of concurrently in-flight operations.
private final Semaphore concurrencyControlSemaphore;
private final Random random;
// Dropwizard metrics: per-operation latency timers and success/failure meters.
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
// Containers created by this run; deleted in shutdown() unless the whole DB is.
private List<String> containerToClearAfterTest = new ArrayList<>();
// True when this run created the database (shutdown() then drops it entirely).
private boolean databaseCreated;
// Workload mix percentages; parsedReadWriteQueryPct requires them to sum to 100.
private int readPct;
private int writePct;
private int queryPct;
/**
 * Builds the Cosmos async client from the configuration, validates the
 * workload mix, ensures database/containers exist, pre-populates documents,
 * and wires up metrics/telemetry reporting.
 *
 * @param cfg benchmark configuration (endpoint, keys, workload shape).
 */
public AsyncCtlWorkload(Configuration cfg) {
    CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
        .endpoint(cfg.getServiceEndpoint())
        .key(cfg.getMasterKey())
        .consistencyLevel(cfg.getConsistencyLevel())
        .contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
    if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
        cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
    } else {
        GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
        gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
        cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
    }
    cosmosClient = cosmosClientBuilder.buildAsyncClient();
    configuration = cfg;
    logger = LoggerFactory.getLogger(this.getClass());
    // Validate the mix up front so a bad configuration fails before any
    // databases or documents are created.
    parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
    createDatabaseAndContainers(configuration);
    // Partition-key property name, e.g. "/pk" -> "pk".
    partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
        .getPaths().iterator().next().split("/")[1];
    concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
    logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
    dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
    createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
    if (configuration.isEnableJvmStats()) {
        // Optional JVM-level gauges (GC, threads, memory) in the same registry.
        metricsRegistry.register("gc", new GarbageCollectorMetricSet());
        metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
        metricsRegistry.register("memory", new MemoryUsageGaugeSet());
    }
    initializeReporter(cfg);
    MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    registry = configuration.getGraphiteMeterRegistry();
    if (registry != null) {
        BridgeInternal.monitorTelemetry(registry);
    }
    prefixUuidForCreate = UUID.randomUUID().toString();
    random = new Random();
}
/**
 * Deletes any database/containers this run created, then closes the client.
 */
public void shutdown() {
    if (this.databaseCreated) {
        // We created the whole database; dropping it removes all containers too.
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
    } else if (containerToClearAfterTest.size() > 0) {
        // Database pre-existed; only remove the containers this run added.
        for (String id : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(id).delete().block();
            logger.info("Deleted temporary collection {} created for this test", id);
        }
    }
    cosmosClient.close();
}
/**
 * Issues one operation (create, query, or read) against a container chosen
 * round-robin by the operation index, after acquiring a concurrency permit.
 *
 * @param documentSubscriber subscriber that records metrics and releases the permit.
 * @param type               operation to perform (Create, Query, otherwise Read).
 * @param i                  operation index; selects the container and the create id.
 * @throws Exception if interrupted while acquiring the concurrency permit.
 */
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
    Flux<? extends Object> obs;
    // Round-robin across the configured containers.
    CosmosAsyncContainer container = containers.get((int) i % containers.size());
    if (type.equals(OperationType.Create)) {
        PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
            dataFieldValue,
            partitionKey,
            configuration.getDocumentDataFieldCount());
        obs = container.createItem(data).flux();
    } else if (type.equals(OperationType.Query)) {
        CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
        String sqlQuery = "Select top 100 * from c order by c._ts";
        obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
    } else {
        // NOTE(review): assumes >= 1000 pre-created documents per container;
        // nextInt(1000) indexes out of bounds otherwise -- confirm configuration.
        int index = random.nextInt(1000);
        // NOTE(review): 'options' is built but never passed to readItem below.
        RequestOptions options = new RequestOptions();
        String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
        options.setPartitionKey(new PartitionKey(partitionKeyValue));
        obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
            new PartitionKey(partitionKeyValue),
            PojoizedJson.class)
            .flux();
    }
    concurrencyControlSemaphore.acquire();
    obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
/**
 * Drives the benchmark loop: registers metrics, issues operations according
 * to the read/write/query mix, then waits for all in-flight work to finish.
 *
 * @throws Exception if interrupted while waiting for completion.
 */
public void run() throws Exception {
    // NOTE(review): the meter-name string literals on the next six lines are
    // truncated in this copy (each line ends at an open quote) -- recover the
    // full names from the original source before compiling.
    readSuccessMeter = metricsRegistry.meter("
    readFailureMeter = metricsRegistry.meter("
    writeSuccessMeter = metricsRegistry.meter("
    writeFailureMeter = metricsRegistry.meter("
    querySuccessMeter = metricsRegistry.meter("
    queryFailureMeter = metricsRegistry.meter("
    readLatency = metricsRegistry.timer("Read Latency");
    writeLatency = metricsRegistry.timer("Write Latency");
    queryLatency = metricsRegistry.timer("Query Latency");
    reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
    long startTime = System.currentTimeMillis();
    // Completed-operation counter; subscribers increment it and notify waiters.
    AtomicLong count = new AtomicLong(0);
    long i;
    int writeRange = readPct + writePct;
    for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
        // [0, readPct) -> read, [readPct, readPct+writePct) -> create, rest -> query.
        int index = (int) i % 100;
        if (index < readPct) {
            BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
                readFailureMeter,
                concurrencyControlSemaphore,
                count);
            readSubscriber.context = readLatency.time();
            performWorkload(readSubscriber, OperationType.Read, i);
        } else if (index < writeRange) {
            BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
                writeFailureMeter,
                concurrencyControlSemaphore,
                count);
            writeSubscriber.context = writeLatency.time();
            performWorkload(writeSubscriber, OperationType.Create, i);
        } else {
            BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
                queryFailureMeter,
                concurrencyControlSemaphore,
                count);
            querySubscriber.context = queryLatency.time();
            performWorkload(querySubscriber, OperationType.Query, i);
        }
    }
    // Block until every issued operation has completed.
    synchronized (count) {
        while (count.get() < i) {
            count.wait();
        }
    }
    long endTime = System.currentTimeMillis();
    // NOTE(review): logs the configured operation count, which may differ from
    // the number actually issued (i) on duration-bounded runs -- confirm.
    logger.info("[{}] operations performed in [{}] seconds.",
        configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
    reporter.report();
    reporter.close();
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} |
Yes we are not validating result , as it would put extra perf to our sdk query end to end result. We are only expecting it not to fail for a successful result. | private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
} | obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10); | private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
}
public void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (containerToClearAfterTest.size() > 0) {
for (String id : containerToClearAfterTest) {
cosmosAsyncDatabase.getContainer(id).delete().block();
logger.info("Deleted temporary collection {} created for this test", id);
}
}
cosmosClient.close();
}
public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter, concurrencyControlSemaphore, count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter, concurrencyControlSemaphore, count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter, concurrencyControlSemaphore, count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} catch (NumberFormatException ex) {
logger.warn(PERCENT_PARSING_ERROR, readPct,
writePct, queryPct);
}
} else {
logger.warn(PERCENT_PARSING_ERROR, readPct, writePct,
queryPct);
}
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct;
private int writePct;
private int queryPct;
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
createDatabaseAndContainers(configuration);
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
}
public void shutdown() {
if (this.databaseCreated) {
cosmosAsyncDatabase.delete().block();
logger.info("Deleted temporary database {} created for this test", this.configuration.getDatabaseId());
} else if (containerToClearAfterTest.size() > 0) {
for (String id : containerToClearAfterTest) {
cosmosAsyncDatabase.getContainer(id).delete().block();
logger.info("Deleted temporary collection {} created for this test", id);
}
}
cosmosClient.close();
}
public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter,
concurrencyControlSemaphore,
count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter,
concurrencyControlSemaphore,
count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter,
concurrencyControlSemaphore,
count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
}
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
String[] readWriteQueryPctList = readWriteQueryPct.split(",");
if (readWriteQueryPctList.length == 3) {
try {
if (Integer.valueOf(readWriteQueryPctList[0]) + Integer.valueOf(readWriteQueryPctList[1]) + Integer.valueOf(readWriteQueryPctList[2]) == 100) {
readPct = Integer.valueOf(readWriteQueryPctList[0]);
writePct = Integer.valueOf(readWriteQueryPctList[1]);
queryPct = Integer.valueOf(readWriteQueryPctList[2]);
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} catch (NumberFormatException ex) {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
} else {
throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
}
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
for (CosmosAsyncContainer container : containers) {
ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
String uId = UUID.randomUUID().toString();
PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
Flux<PojoizedJson> obs = container.createItem(newDoc).map(resp -> {
PojoizedJson x =
resp.getItem();
return x;
}).flux();
createDocumentObservables.add(obs);
}
logger.info("Finished pre-populating {} documents for container {}",
numberOfPreCreatedDocuments, container.getId());
docsToRead.put(container.getId(),
Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
}
}
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
private void initializeReporter(Configuration configuration) {
if (configuration.getGraphiteEndpoint() != null) {
final Graphite graphite = new Graphite(new InetSocketAddress(
configuration.getGraphiteEndpoint(),
configuration.getGraphiteEndpointPort()));
reporter = GraphiteReporter.forRegistry(metricsRegistry)
.prefixedWith(configuration.getOperationType().name())
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.filter(MetricFilter.ALL)
.build(graphite);
} else if (configuration.getReportingDirectory() != null) {
reporter = CsvReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build(configuration.getReportingDirectory());
} else {
reporter = ConsoleReporter.forRegistry(metricsRegistry)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS)
.build();
}
}
} |
done | public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter, concurrencyControlSemaphore, count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter, concurrencyControlSemaphore, count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter, concurrencyControlSemaphore, count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | readFailureMeter, concurrencyControlSemaphore, count); | public void run() throws Exception {
readSuccessMeter = metricsRegistry.meter("
readFailureMeter = metricsRegistry.meter("
writeSuccessMeter = metricsRegistry.meter("
writeFailureMeter = metricsRegistry.meter("
querySuccessMeter = metricsRegistry.meter("
queryFailureMeter = metricsRegistry.meter("
readLatency = metricsRegistry.timer("Read Latency");
writeLatency = metricsRegistry.timer("Write Latency");
queryLatency = metricsRegistry.timer("Query Latency");
reporter.start(configuration.getPrintingInterval(), TimeUnit.SECONDS);
long startTime = System.currentTimeMillis();
AtomicLong count = new AtomicLong(0);
long i;
int writeRange = readPct + writePct;
for (i = 0; BenchmarkHelper.shouldContinue(startTime, i, configuration); i++) {
int index = (int) i % 100;
if (index < readPct) {
BenchmarkRequestSubscriber<Object> readSubscriber = new BenchmarkRequestSubscriber<>(readSuccessMeter,
readFailureMeter,
concurrencyControlSemaphore,
count);
readSubscriber.context = readLatency.time();
performWorkload(readSubscriber, OperationType.Read, i);
} else if (index < writeRange) {
BenchmarkRequestSubscriber<Object> writeSubscriber = new BenchmarkRequestSubscriber<>(writeSuccessMeter,
writeFailureMeter,
concurrencyControlSemaphore,
count);
writeSubscriber.context = writeLatency.time();
performWorkload(writeSubscriber, OperationType.Create, i);
} else {
BenchmarkRequestSubscriber<Object> querySubscriber = new BenchmarkRequestSubscriber<>(querySuccessMeter,
queryFailureMeter,
concurrencyControlSemaphore,
count);
querySubscriber.context = queryLatency.time();
performWorkload(querySubscriber, OperationType.Query, i);
}
}
synchronized (count) {
while (count.get() < i) {
count.wait();
}
}
long endTime = System.currentTimeMillis();
logger.info("[{}] operations performed in [{}] seconds.",
configuration.getNumberOfOperations(), (int) ((endTime - startTime) / 1000));
reporter.report();
reporter.close();
} | class AsyncCtlWorkload {
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct, using default {} {} {}";
private final String prefixUuidForCreate;
private final String dataFieldValue;
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
private final Semaphore concurrencyControlSemaphore;
private final Random random;
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
private List<String> containerToClearAfterTest = new ArrayList<>();
private boolean databaseCreated;
private int readPct = 90;
private int writePct = 9;
private int queryPct = 1;
/**
 * Builds the Cosmos async client, ensures the database and containers exist,
 * pre-populates read-target documents, and wires up metrics reporting.
 *
 * NOTE(review): parsedReadWriteQueryPct is invoked last, so an invalid
 * readWriteQueryPct value is only reported after the (potentially expensive)
 * document pre-population has already run.
 *
 * @param cfg benchmark configuration (endpoint, key, concurrency, workload mix, ...)
 */
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
// Creates database/containers on demand and records what shutdown() must clean up.
createDatabaseAndContainers(configuration);
// PK definition path looks like "/pk"; keep only the property name after the leading '/'.
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
// Telemetry may additionally be published to Azure Monitor and/or Graphite when configured.
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
}
/**
 * Releases every resource this workload created: drops the whole database when
 * this run created it, otherwise drops only the collections it added, then
 * closes the Cosmos client.
 */
public void shutdown() {
    if (databaseCreated) {
        // The entire database belongs to this run; dropping it removes everything.
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", configuration.getDatabaseId());
    } else if (!containerToClearAfterTest.isEmpty()) {
        // Database pre-existed; only remove the collections this run added.
        for (String containerId : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(containerId).delete().block();
            logger.info("Deleted temporary collection {} created for this test", containerId);
        }
    }
    cosmosClient.close();
}
/**
 * Issues one asynchronous operation of the given type against one of the containers
 * (chosen round-robin by operation index) and subscribes the metric-recording subscriber.
 *
 * @param documentSubscriber subscriber that records the outcome of the operation
 * @param type Create, Query, or (any other value) a point read
 * @param i operation index; also selects the target container
 * @throws Exception if interrupted while acquiring the concurrency permit
 */
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
// Round-robin across the configured containers.
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i, dataFieldValue, partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
// Read path: pick one of the pre-created documents at random.
// NOTE(review): assumes at least 1000 documents were pre-created per container,
// otherwise this index is out of range -- confirm against the configuration.
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
// Blocks until a permit is free; presumably the subscriber releases it on completion -- verify.
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
/**
 * Parses a "read,write,query" percentage triple (e.g. "90,9,1") into the
 * readPct/writePct/queryPct fields. When the value is malformed or the three
 * numbers do not sum to 100, the defaults (90/9/1) are kept and a single
 * warning is logged; this method never throws, so a bad value cannot abort a run.
 *
 * @param readWriteQueryPct comma separated read/write/query percentages; must sum to 100
 */
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
    String[] parts = readWriteQueryPct.split(",");
    boolean parsed = false;
    if (parts.length == 3) {
        try {
            // parseInt avoids the boxing that Integer.valueOf introduced in the original.
            int read = Integer.parseInt(parts[0]);
            int write = Integer.parseInt(parts[1]);
            int query = Integer.parseInt(parts[2]);
            if (read + write + query == 100) {
                readPct = read;
                writePct = write;
                queryPct = query;
                parsed = true;
            }
        } catch (NumberFormatException ex) {
            // Malformed number: fall through to the single warning below.
        }
    }
    if (!parsed) {
        // One warning path instead of three duplicated logger.warn calls.
        logger.warn(PERCENT_PARSING_ERROR, readPct, writePct, queryPct);
    }
}
/**
 * Synchronously pre-creates {@code numberOfPreCreatedDocuments} documents in every
 * container and records them per container id as read targets for the workload.
 *
 * @param numberOfPreCreatedDocuments number of documents to create per container
 */
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
    for (CosmosAsyncContainer container : containers) {
        ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
        for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
            String uId = UUID.randomUUID().toString();
            PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId, dataFieldValue, partitionKey,
                configuration.getDocumentDataFieldCount());
            // Lazy: nothing executes until the merged flux below is subscribed.
            Flux<PojoizedJson> obs = container.createItem(newDoc)
                .map(resp -> resp.getItem())
                .flux();
            createDocumentObservables.add(obs);
        }
        // Run up to 100 creates concurrently and wait for all of them to finish.
        docsToRead.put(container.getId(),
            Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
        // Fixed: this message used to be logged *before* block(), i.e. before any
        // create request had actually completed.
        logger.info("Finished pre-populating {} documents for container {}",
            numberOfPreCreatedDocuments, container.getId());
    }
}
/**
 * Ensures the configured database and N containers exist, creating any that are
 * missing. Everything created here is remembered (databaseCreated /
 * containerToClearAfterTest) so shutdown() can remove only what this run added.
 *
 * @param cfg configuration supplying database id, collection id prefix,
 *            collection count and provisioned throughput
 */
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
// Probe for existence; a 404 means we must create the database ourselves.
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
// Any other failure (auth, throttling, ...) is fatal for the benchmark.
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
// Containers are named "<collectionId>_1" .. "<collectionId>_N".
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
// Track for cleanup: this container did not exist before the run.
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
/**
 * Selects a metrics reporter from the configuration: Graphite when an endpoint
 * is set, CSV when a reporting directory is set, console otherwise.
 */
private void initializeReporter(Configuration configuration) {
    if (configuration.getGraphiteEndpoint() != null) {
        InetSocketAddress graphiteAddress = new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort());
        reporter = GraphiteReporter
            .forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build(new Graphite(graphiteAddress));
    } else if (configuration.getReportingDirectory() != null) {
        reporter = CsvReporter
            .forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build(configuration.getReportingDirectory());
    } else {
        reporter = ConsoleReporter
            .forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build();
    }
}
} | class AsyncCtlWorkload {
// Error prefix; the offending readWriteQueryPct value is appended when validation fails.
private final String PERCENT_PARSING_ERROR = "Unable to parse user provided readWriteQueryPct ";
// Id prefix for documents created during the run, distinguishing them from pre-created docs.
private final String prefixUuidForCreate;
// Random payload written into every generated document.
private final String dataFieldValue;
// Name of the partition-key property (first path segment of the container's PK definition).
private final String partitionKey;
private final MetricRegistry metricsRegistry = new MetricRegistry();
private final Logger logger;
private final CosmosAsyncClient cosmosClient;
private final Configuration configuration;
// Pre-created documents keyed by container id; used as read targets by the workload.
private final Map<String, List<PojoizedJson>> docsToRead = new HashMap<>();
// Caps the number of in-flight operations at Configuration#getConcurrency().
private final Semaphore concurrencyControlSemaphore;
private final Random random;
// Metrics objects; presumably registered elsewhere by the workload driver -- TODO confirm.
private Timer readLatency;
private Timer writeLatency;
private Timer queryLatency;
private ScheduledReporter reporter;
private Meter readSuccessMeter;
private Meter readFailureMeter;
private Meter writeSuccessMeter;
private Meter writeFailureMeter;
private Meter querySuccessMeter;
private Meter queryFailureMeter;
private CosmosAsyncDatabase cosmosAsyncDatabase;
private List<CosmosAsyncContainer> containers = new ArrayList<>();
// Containers created by this run; dropped in shutdown() when the database pre-existed.
private List<String> containerToClearAfterTest = new ArrayList<>();
// True when the database itself was created by this run (shutdown() then drops it whole).
private boolean databaseCreated;
// No defaults: parsedReadWriteQueryPct() must succeed (it throws otherwise) before use.
private int readPct;
private int writePct;
private int queryPct;
/**
 * Builds the Cosmos async client, validates the workload mix, ensures the
 * database and containers exist, pre-populates read-target documents, and wires
 * up metrics reporting.
 *
 * parsedReadWriteQueryPct runs first here so an invalid readWriteQueryPct fails
 * fast, before any expensive database/document setup.
 *
 * @param cfg benchmark configuration (endpoint, key, concurrency, workload mix, ...)
 */
public AsyncCtlWorkload(Configuration cfg) {
CosmosClientBuilder cosmosClientBuilder = new CosmosClientBuilder()
.endpoint(cfg.getServiceEndpoint())
.key(cfg.getMasterKey())
.consistencyLevel(cfg.getConsistencyLevel())
.contentResponseOnWriteEnabled(Boolean.parseBoolean(cfg.isContentResponseOnWriteEnabled()));
if (cfg.getConnectionMode().equals(ConnectionMode.DIRECT)) {
cosmosClientBuilder = cosmosClientBuilder.directMode(DirectConnectionConfig.getDefaultConfig());
} else {
GatewayConnectionConfig gatewayConnectionConfig = new GatewayConnectionConfig();
gatewayConnectionConfig.setMaxConnectionPoolSize(cfg.getMaxConnectionPoolSize());
cosmosClientBuilder = cosmosClientBuilder.gatewayMode(gatewayConnectionConfig);
}
cosmosClient = cosmosClientBuilder.buildAsyncClient();
configuration = cfg;
logger = LoggerFactory.getLogger(this.getClass());
// Validate the read/write/query mix up front; throws on bad input.
parsedReadWriteQueryPct(configuration.getReadWriteQueryPct());
// Creates database/containers on demand and records what shutdown() must clean up.
createDatabaseAndContainers(configuration);
// PK definition path looks like "/pk"; keep only the property name after the leading '/'.
partitionKey = containers.get(0).read().block().getProperties().getPartitionKeyDefinition()
.getPaths().iterator().next().split("/")[1];
concurrencyControlSemaphore = new Semaphore(cfg.getConcurrency());
logger.info("PRE-populating {} documents ....", cfg.getNumberOfPreCreatedDocuments());
dataFieldValue = RandomStringUtils.randomAlphabetic(configuration.getDocumentDataFieldSize());
createPrePopulatedDocs(configuration.getNumberOfPreCreatedDocuments());
if (configuration.isEnableJvmStats()) {
metricsRegistry.register("gc", new GarbageCollectorMetricSet());
metricsRegistry.register("threads", new CachedThreadStatesGaugeSet(10, TimeUnit.SECONDS));
metricsRegistry.register("memory", new MemoryUsageGaugeSet());
}
initializeReporter(cfg);
// Telemetry may additionally be published to Azure Monitor and/or Graphite when configured.
MeterRegistry registry = configuration.getAzureMonitorMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
registry = configuration.getGraphiteMeterRegistry();
if (registry != null) {
BridgeInternal.monitorTelemetry(registry);
}
prefixUuidForCreate = UUID.randomUUID().toString();
random = new Random();
}
/**
 * Releases every resource this workload created: drops the whole database when
 * this run created it, otherwise drops only the collections it added, then
 * closes the Cosmos client.
 */
public void shutdown() {
    if (databaseCreated) {
        // The entire database belongs to this run; dropping it removes everything.
        cosmosAsyncDatabase.delete().block();
        logger.info("Deleted temporary database {} created for this test", configuration.getDatabaseId());
    } else if (!containerToClearAfterTest.isEmpty()) {
        // Database pre-existed; only remove the collections this run added.
        for (String containerId : containerToClearAfterTest) {
            cosmosAsyncDatabase.getContainer(containerId).delete().block();
            logger.info("Deleted temporary collection {} created for this test", containerId);
        }
    }
    cosmosClient.close();
}
/**
 * Issues one asynchronous operation of the given type against one of the containers
 * (chosen round-robin by operation index) and subscribes the metric-recording subscriber.
 *
 * @param documentSubscriber subscriber that records the outcome of the operation
 * @param type Create, Query, or (any other value) a point read
 * @param i operation index; also selects the target container
 * @throws Exception if interrupted while acquiring the concurrency permit
 */
private void performWorkload(BaseSubscriber<Object> documentSubscriber, OperationType type, long i) throws Exception {
Flux<? extends Object> obs;
// Round-robin across the configured containers.
CosmosAsyncContainer container = containers.get((int) i % containers.size());
if (type.equals(OperationType.Create)) {
PojoizedJson data = BenchmarkHelper.generateDocument(prefixUuidForCreate + i,
dataFieldValue,
partitionKey,
configuration.getDocumentDataFieldCount());
obs = container.createItem(data).flux();
} else if (type.equals(OperationType.Query)) {
CosmosQueryRequestOptions options = new CosmosQueryRequestOptions();
String sqlQuery = "Select top 100 * from c order by c._ts";
obs = container.queryItems(sqlQuery, options, PojoizedJson.class).byPage(10);
} else {
// Read path: pick one of the pre-created documents at random.
// NOTE(review): assumes at least 1000 documents were pre-created per container,
// otherwise this index is out of range -- confirm against the configuration.
int index = random.nextInt(1000);
RequestOptions options = new RequestOptions();
String partitionKeyValue = docsToRead.get(container.getId()).get(index).getId();
options.setPartitionKey(new PartitionKey(partitionKeyValue));
obs = container.readItem(docsToRead.get(container.getId()).get(index).getId(),
new PartitionKey(partitionKeyValue),
PojoizedJson.class)
.flux();
}
// Blocks until a permit is free; presumably the subscriber releases it on completion -- verify.
concurrencyControlSemaphore.acquire();
obs.subscribeOn(Schedulers.parallel()).subscribe(documentSubscriber);
}
/**
 * Parses a "read,write,query" percentage triple (e.g. "90,9,1") into the
 * readPct/writePct/queryPct fields.
 *
 * @param readWriteQueryPct comma separated read/write/query percentages; must sum to 100
 * @throws IllegalArgumentException if the value is malformed, has the wrong
 *         number of parts, or the three numbers do not sum to 100
 */
private void parsedReadWriteQueryPct(String readWriteQueryPct) {
    String[] parts = readWriteQueryPct.split(",");
    if (parts.length == 3) {
        try {
            // parseInt avoids the boxing that Integer.valueOf introduced in the original.
            int read = Integer.parseInt(parts[0]);
            int write = Integer.parseInt(parts[1]);
            int query = Integer.parseInt(parts[2]);
            if (read + write + query == 100) {
                readPct = read;
                writePct = write;
                queryPct = query;
                return;
            }
        } catch (NumberFormatException ex) {
            // Malformed number: fall through to the single failure path below.
        }
    }
    // One throw site instead of three duplicated statements; same type and message.
    throw new IllegalArgumentException(PERCENT_PARSING_ERROR + readWriteQueryPct);
}
/**
 * Synchronously pre-creates {@code numberOfPreCreatedDocuments} documents in every
 * container and records them per container id as read targets for the workload.
 *
 * @param numberOfPreCreatedDocuments number of documents to create per container
 */
private void createPrePopulatedDocs(int numberOfPreCreatedDocuments) {
    for (CosmosAsyncContainer container : containers) {
        ArrayList<Flux<PojoizedJson>> createDocumentObservables = new ArrayList<>();
        for (int i = 0; i < numberOfPreCreatedDocuments; i++) {
            String uId = UUID.randomUUID().toString();
            PojoizedJson newDoc = BenchmarkHelper.generateDocument(uId,
                dataFieldValue,
                partitionKey,
                configuration.getDocumentDataFieldCount());
            // Lazy: nothing executes until the merged flux below is subscribed.
            Flux<PojoizedJson> obs = container.createItem(newDoc)
                .map(resp -> resp.getItem())
                .flux();
            createDocumentObservables.add(obs);
        }
        // Run up to 100 creates concurrently and wait for all of them to finish.
        docsToRead.put(container.getId(),
            Flux.merge(Flux.fromIterable(createDocumentObservables), 100).collectList().block());
        // Fixed: this message used to be logged *before* block(), i.e. before any
        // create request had actually completed.
        logger.info("Finished pre-populating {} documents for container {}",
            numberOfPreCreatedDocuments, container.getId());
    }
}
/**
 * Ensures the configured database and N containers exist, creating any that are
 * missing. Everything created here is remembered (databaseCreated /
 * containerToClearAfterTest) so shutdown() can remove only what this run added.
 *
 * @param cfg configuration supplying database id, collection id prefix,
 *            collection count and provisioned throughput
 */
private void createDatabaseAndContainers(Configuration cfg) {
try {
cosmosAsyncDatabase = cosmosClient.getDatabase(this.configuration.getDatabaseId());
// Probe for existence; a 404 means we must create the database ourselves.
cosmosAsyncDatabase.read().block();
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosClient.createDatabase(cfg.getDatabaseId()).block();
cosmosAsyncDatabase = cosmosClient.getDatabase(cfg.getDatabaseId());
logger.info("Database {} is created for this test", this.configuration.getDatabaseId());
databaseCreated = true;
} else {
// Any other failure (auth, throttling, ...) is fatal for the benchmark.
throw e;
}
}
int numberOfCollection = cfg.getNumberOfCollectionForCtl();
if (numberOfCollection < 1) {
numberOfCollection = 1;
}
// Containers are named "<collectionId>_1" .. "<collectionId>_N".
for (int i = 1; i <= numberOfCollection; i++) {
try {
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
cosmosAsyncContainer.read().block();
containers.add(cosmosAsyncContainer);
} catch (CosmosException e) {
if (e.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
cosmosAsyncDatabase.createContainer(
this.configuration.getCollectionId() + "_" + i,
Configuration.DEFAULT_PARTITION_KEY_PATH,
ThroughputProperties.createManualThroughput(this.configuration.getThroughput())
).block();
CosmosAsyncContainer cosmosAsyncContainer =
cosmosAsyncDatabase.getContainer(this.configuration.getCollectionId() + "_" + i);
logger.info("Collection {} is created for this test",
this.configuration.getCollectionId() + "_" + i);
containers.add(cosmosAsyncContainer);
// Track for cleanup: this container did not exist before the run.
containerToClearAfterTest.add(cosmosAsyncContainer.getId());
} else {
throw e;
}
}
}
}
/**
 * Selects a metrics reporter from the configuration: Graphite when an endpoint
 * is set, CSV when a reporting directory is set, console otherwise.
 */
private void initializeReporter(Configuration configuration) {
    if (configuration.getGraphiteEndpoint() != null) {
        InetSocketAddress graphiteAddress = new InetSocketAddress(
            configuration.getGraphiteEndpoint(),
            configuration.getGraphiteEndpointPort());
        reporter = GraphiteReporter
            .forRegistry(metricsRegistry)
            .prefixedWith(configuration.getOperationType().name())
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .filter(MetricFilter.ALL)
            .build(new Graphite(graphiteAddress));
    } else if (configuration.getReportingDirectory() != null) {
        reporter = CsvReporter
            .forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build(configuration.getReportingDirectory());
    } else {
        reporter = ConsoleReporter
            .forRegistry(metricsRegistry)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .convertRatesTo(TimeUnit.SECONDS)
            .build();
    }
}
} |
We have several different `queryDatabases` overloads and they all delegate to one method. You don't need to repeat this null-check in every overload; you could just move it into `queryDatabasesInternal()`. | public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(querySpec, options);
} | return queryDatabasesInternal(querySpec, options); | public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(querySpec, options);
} | class CosmosAsyncClient implements Closeable {
// Immutable client configuration captured from the builder at construction time.
private final Configs configs;
// The wrapped low-level document client; all service calls are delegated to it.
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
// Tracer implementation discovered once via ServiceLoader; null when none is on the classpath.
private static final Tracer TRACER;
// Resolve an optional Tracer implementation from the classpath via ServiceLoader.
// TRACER stays null when no provider is registered.
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    Iterator<Tracer> iterator = serviceLoader.iterator();
    if (iterator.hasNext()) {
        // Reuse the iterator we already called hasNext() on; the original created a
        // second iterator (serviceLoader.iterator().next()) and used a raw Iterator<?>.
        TRACER = iterator.next();
    } else {
        TRACER = null;
    }
}
/**
 * Package-private constructor: copies all settings from the builder and builds
 * the underlying {@link AsyncDocumentClient} that performs the actual service calls.
 *
 * @param builder the configured {@link CosmosClientBuilder}
 */
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
// TRACER may be null; TracerProvider presumably handles that case -- confirm.
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
/**
 * Gets the underlying document client used for context propagation.
 *
 * @return the wrapped {@link AsyncDocumentClient}.
 */
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
/**
 * Gets the wrapped document client.
 *
 * @return the wrapped {@link AsyncDocumentClient}.
 */
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* Create a database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
// null throughput: the service applies its defaults if the database must be created.
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}; may be null, in which case defaults are used.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}; may be null, in which case defaults are used.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
// Attach the requested throughput to the (possibly caller-supplied) options instance.
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
// Map wire-level Database resources to public CosmosDatabaseProperties pages.
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options; may be null, in which case defaults are used.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
// NOTE(review): the Javadoc block below documents a
// queryDatabases(SqlQuerySpec, CosmosQueryRequestOptions) overload that does not
// appear beneath it -- confirm whether that overload was moved or removed and
// relocate or delete this Javadoc accordingly.
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Closes this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
/**
 * Gets the tracer provider used to emit tracing spans for client operations.
 *
 * @return the {@link TracerProvider}.
 */
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
/**
 * Shared implementation behind the public queryDatabases overloads: sets up
 * tracing and paging state, issues the query and maps wire-level results to
 * {@link CosmosDatabaseProperties} pages.
 *
 * @param querySpec the SQL query specification
 * @param options the feed options (never null here; callers default it)
 * @return a {@link CosmosPagedFlux} of database property pages
 */
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
private static final Tracer TRACER;
static {
ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
Iterator<?> iterator = serviceLoader.iterator();
if (iterator.hasNext()) {
TRACER = serviceLoader.iterator().next();
} else {
TRACER = null;
}
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
return readAllDatabases(new CosmosQueryRequestOptions());
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param query the query.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(new SqlQuerySpec(query), options);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
/**
* Gets a database object without making a service call.
*
* @param id name of the database.
* @return {@link CosmosAsyncDatabase}.
*/
public CosmosAsyncDatabase getDatabase(String id) {
return new CosmosAsyncDatabase(id, this);
}
/**
* Close this {@link CosmosAsyncClient} instance and cleans up the resources.
*/
@Override
public void close() {
asyncDocumentClient.close();
}
TracerProvider getTracerProvider(){
return this.tracerProvider;
}
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options){
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().queryDatabases(querySpec, options)
.map(response -> BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
ThroughputProperties throughputProperties, Context context) {
String spanName = "createDatabaseIfNotExists." + database.getId();
Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
nestedContext).onErrorResume(exception -> {
final Throwable unwrappedException = Exceptions.unwrap(exception);
if (unwrappedException instanceof CosmosException) {
final CosmosException cosmosException = (CosmosException) unwrappedException;
if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
if (throughputProperties != null) {
ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
}
Database wrappedDatabase = new Database();
wrappedDatabase.setId(database.getId());
return createDatabaseInternal(wrappedDatabase,
requestOptions, nestedContext);
}
}
return Mono.error(unwrappedException);
});
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
Context context) {
String spanName = "createDatabase." + database.getId();
Mono<CosmosDatabaseResponse> responseMono = asyncDocumentClient.createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
.map(databaseResourceResponse -> ModelBridgeInternal.createCosmosDatabaseResponse(databaseResourceResponse))
.single();
return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
context,
spanName,
database.getId(),
this.serviceEndpoint);
}
} |
What you refer is the one without annotation on Method. However, if we do not have annoatation, we cannot guarantee what we pass is the getter method with 'get' prefix. I don't think we need to check too much things here as validating whether it is the invoked getter is a huge cost. | public void testPropertyNameOnMethodName() throws NoSuchMethodException {
class Hotel {
String hotelName;
public String getHotelName() {
return hotelName;
}
}
Method m = Hotel.class.getDeclaredMethod("getHotelName");
assertMemberValue(m, "getHotelName");
} | assertMemberValue(m, "getHotelName"); | public void testPropertyNameOnMethodName() throws NoSuchMethodException {
class LocalHotel {
String hotelName;
public String getHotelName() {
return hotelName;
}
}
Method m = LocalHotel.class.getDeclaredMethod("getHotelName");
assertNull(serializer.convertMemberName(m));
} | class Hotel {
@SerializedName(value = "")
String hotelName;
} | class LocalHotel {
@SerializedName(value = "")
String hotelName;
} |
I thought that too. We are following this explicit check on api entry point in crud operations. So one thing is consistency, second we have github item https://github.com/Azure/azure-sdk-for-java/issues/13031 , where we will refactor all apis in one work item, otherwise it will be confusing if we do it for some and leave others | public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(querySpec, options);
} | return queryDatabasesInternal(querySpec, options); | public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
if (options == null) {
options = new CosmosQueryRequestOptions();
}
return queryDatabasesInternal(querySpec, options);
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
private static final Tracer TRACER;
static {
ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
Iterator<?> iterator = serviceLoader.iterator();
if (iterator.hasNext()) {
TRACER = serviceLoader.iterator().next();
} else {
TRACER = null;
}
}
CosmosAsyncClient(CosmosClientBuilder builder) {
this.configs = builder.configs();
this.serviceEndpoint = builder.getEndpoint();
this.keyOrResourceToken = builder.getKey();
this.connectionPolicy = builder.getConnectionPolicy();
this.desiredConsistencyLevel = builder.getConsistencyLevel();
this.permissions = builder.getPermissions();
this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
this.credential = builder.getCredential();
this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
this.tracerProvider = new TracerProvider(TRACER);
this.asyncDocumentClient = new AsyncDocumentClient.Builder()
.withServiceEndpoint(this.serviceEndpoint)
.withMasterKeyOrResourceToken(this.keyOrResourceToken)
.withConnectionPolicy(this.connectionPolicy)
.withConsistencyLevel(this.desiredConsistencyLevel)
.withSessionCapturingOverride(this.sessionCapturingOverride)
.withConfigs(this.configs)
.withTokenResolver(this.cosmosAuthorizationTokenResolver)
.withCredential(this.credential)
.withTransportClientSharing(this.enableTransportClientSharing)
.withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
.build();
}
AsyncDocumentClient getContextClient() {
return this.asyncDocumentClient;
}
/**
* Monitor Cosmos client performance and resource utilization using the specified meter registry.
*
* @param registry meter registry to use for performance monitoring.
*/
static void setMonitorTelemetry(MeterRegistry registry) {
RntbdMetrics.add(registry);
}
/**
* Get the service endpoint.
*
* @return the service endpoint.
*/
String getServiceEndpoint() {
return serviceEndpoint;
}
/**
* Gets the key or resource token.
*
* @return get the key or resource token.
*/
String getKeyOrResourceToken() {
return keyOrResourceToken;
}
/**
* Get the connection policy.
*
* @return {@link ConnectionPolicy}.
*/
ConnectionPolicy getConnectionPolicy() {
return connectionPolicy;
}
/**
* Gets the consistency level.
*
* @return the {@link ConsistencyLevel}.
*/
ConsistencyLevel getDesiredConsistencyLevel() {
return desiredConsistencyLevel;
}
/**
* Gets the permission list.
*
* @return the permission list.
*/
List<CosmosPermissionProperties> getPermissions() {
return permissions;
}
AsyncDocumentClient getDocClientWrapper() {
return asyncDocumentClient;
}
/**
* Gets the configs.
*
* @return the configs.
*/
Configs getConfigs() {
return configs;
}
/**
* Gets the token resolver.
*
* @return the token resolver.
*/
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
return cosmosAuthorizationTokenResolver;
}
/**
* Gets the azure key credential.
*
* @return azure key credential.
*/
AzureKeyCredential credential() {
return credential;
}
/**
* Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
* in case of Create, Update and Delete operations on CosmosItem.
*
* If set to false (which is by default), this removes the resource from response. It reduces networking
* and CPU load by not sending the resource back over the network and serializing it
* on the client.
*
* By-default, this is false.
*
* @return a boolean indicating whether resource will be included in the response or not.
*/
boolean isContentResponseOnWriteEnabled() {
return contentResponseOnWriteEnabled;
}
/**
* CREATE a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param databaseProperties CosmosDatabaseProperties.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(databaseProperties.getId()),
null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id of the database.
* @return a {@link Mono} containing the cosmos database response with the created or existing database or
* an error.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id), null, context));
}
/**
* Create a Database if it does not already exist on the service.
* <p>
* The throughputProperties will only be used if the specified database
* does not exist and therefor a new database will be created with throughputProperties.
* <p>
* The {@link Mono} upon successful completion will contain a single cosmos database response with the
* created or existing database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
return withContext(context -> createDatabaseIfNotExistsInternal(getDatabase(id),
throughputProperties, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
CosmosDatabaseRequestOptions options) {
final CosmosDatabaseRequestOptions requestOptions = options == null ? new CosmosDatabaseRequestOptions() : options;
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param id id of the database.
* @return a {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
return createDatabase(new CosmosDatabaseProperties(id), new CosmosDatabaseRequestOptions());
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @param options {@link CosmosDatabaseRequestOptions}.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
ThroughputProperties throughputProperties,
CosmosDatabaseRequestOptions options) {
if (options == null) {
options = new CosmosDatabaseRequestOptions();
}
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
Database wrappedDatabase = new Database();
wrappedDatabase.setId(databaseProperties.getId());
final CosmosDatabaseRequestOptions requestOptions = options;
return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
* Creates a database.
* <p>
* After subscription the operation will be performed.
* The {@link Mono} upon successful completion will contain a single resource response with the
* created database.
* In case of failure the {@link Mono} will error.
*
* @param databaseProperties {@link CosmosDatabaseProperties}.
* @param throughputProperties the throughput properties for the database.
* @return an {@link Mono} containing the single cosmos database response with the created database or an error.
*/
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(databaseProperties, options);
}
/**
* Creates a database.
*
* @param id the id.
* @param throughputProperties the throughputProperties.
* @return the mono.
*/
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
CosmosDatabaseRequestOptions options = new CosmosDatabaseRequestOptions();
ModelBridgeInternal.setThroughputProperties(options, throughputProperties);
return createDatabase(new CosmosDatabaseProperties(id), options);
}
/**
* Reads all databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param options {@link CosmosQueryRequestOptions}
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
return UtilBridgeInternal.createCosmosPagedFlux(pagedFluxOptions -> {
pagedFluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
setContinuationTokenAndMaxItemCount(pagedFluxOptions, options);
return getDocClientWrapper().readDatabases(options)
.map(response ->
BridgeInternal.createFeedResponse(
ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(response.getResults()),
response.getResponseHeaders()));
});
}
/**
 * Reads all databases in the account using default query request options.
 *
 * @return a {@link CosmosPagedFlux} of {@link CosmosDatabaseProperties} pages or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    CosmosQueryRequestOptions defaultOptions = new CosmosQueryRequestOptions();
    return readAllDatabases(defaultOptions);
}
/**
 * Queries for databases matching the given SQL query text.
 * <p>
 * The operation runs on subscription; the returned {@link CosmosPagedFlux} emits one or more
 * feed-response pages of the matching databases, or errors on failure.
 *
 * @param query the SQL query text.
 * @param options the feed options; may be {@code null}, in which case defaults are used.
 * @return a {@link CosmosPagedFlux} of {@link CosmosDatabaseProperties} pages or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Keep the parameter effectively final instead of reassigning it; fall back to defaults when absent.
    final CosmosQueryRequestOptions queryOptions =
        options == null ? new CosmosQueryRequestOptions() : options;
    return queryDatabasesInternal(new SqlQuerySpec(query), queryOptions);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
/**
 * Obtains a local {@link CosmosAsyncDatabase} reference; no service call is made.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase} bound to this client.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    CosmosAsyncDatabase database = new CosmosAsyncDatabase(id, this);
    return database;
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 */
@Override
public void close() {
    // Delegates cleanup to the wrapped document client (connections, pools, etc.).
    asyncDocumentClient.close();
}
/**
 * Gets the tracer provider used for distributed tracing of client operations.
 *
 * @return the {@link TracerProvider}.
 */
TracerProvider getTracerProvider() {
    return tracerProvider;
}
/**
 * Executes a database query and adapts the V2 feed responses to {@link CosmosDatabaseProperties} pages.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} of matching database pages or an error.
 */
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(fluxOptions -> {
        // Tracing and continuation/page-size propagation happen per subscription.
        fluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(fluxOptions, options);
        return getDocClientWrapper()
            .queryDatabases(querySpec, options)
            .map(v2Response -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(v2Response.getResults()),
                v2Response.getResponseHeaders()));
    });
}
/**
 * Reads the database first and, only on a 404 (not found), falls back to creating it.
 * The read and the fallback create share a nested tracing context so both appear under
 * the single "createDatabaseIfNotExists" span; throughput is applied only on the create path.
 */
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
    ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Mark the nested calls so the tracer treats them as children of this operation's span.
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
        nestedContext).onErrorResume(exception -> {
        // Reactor may wrap the service exception; unwrap before inspecting it.
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Database does not exist yet: create it, applying throughput only if supplied.
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                if (throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                Database wrappedDatabase = new Database();
                wrappedDatabase.setId(database.getId());
                return createDatabaseInternal(wrappedDatabase,
                    requestOptions, nestedContext);
            }
        }
        // Any other failure (or non-404 status) is propagated unchanged.
        return Mono.error(unwrappedException);
    });
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
/**
 * Issues the actual create-database call and wraps it in a tracing span.
 *
 * @param database the wire-level database resource to create.
 * @param options request options for the create call.
 * @param context tracing context of the caller.
 * @return a {@link Mono} emitting the create response.
 */
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono =
        asyncDocumentClient
            .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
            .map(ModelBridgeInternal::createCosmosDatabaseResponse)
            .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(
        responseMono, context, spanName, database.getId(), this.serviceEndpoint);
}
} | class CosmosAsyncClient implements Closeable {
private final Configs configs;
private final AsyncDocumentClient asyncDocumentClient;
private final String serviceEndpoint;
private final String keyOrResourceToken;
private final ConnectionPolicy connectionPolicy;
private final ConsistencyLevel desiredConsistencyLevel;
private final List<CosmosPermissionProperties> permissions;
private final CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver;
private final AzureKeyCredential credential;
private final boolean sessionCapturingOverride;
private final boolean enableTransportClientSharing;
private final TracerProvider tracerProvider;
private final boolean contentResponseOnWriteEnabled;
// Resolve the first Tracer implementation available on the classpath, if any.
private static final Tracer TRACER;
static {
    ServiceLoader<Tracer> serviceLoader = ServiceLoader.load(Tracer.class);
    // Reuse the same iterator for the hasNext/next pair; the original code called
    // serviceLoader.iterator() a second time, building a fresh iterator for next().
    Iterator<Tracer> iterator = serviceLoader.iterator();
    TRACER = iterator.hasNext() ? iterator.next() : null;
}
// Package-private constructor: copies all configuration from the builder, then builds the
// underlying AsyncDocumentClient with the same settings. The field copies are kept so the
// client can expose them via getters without reaching back into the builder.
CosmosAsyncClient(CosmosClientBuilder builder) {
    this.configs = builder.configs();
    this.serviceEndpoint = builder.getEndpoint();
    this.keyOrResourceToken = builder.getKey();
    this.connectionPolicy = builder.getConnectionPolicy();
    this.desiredConsistencyLevel = builder.getConsistencyLevel();
    this.permissions = builder.getPermissions();
    this.cosmosAuthorizationTokenResolver = builder.getAuthorizationTokenResolver();
    this.credential = builder.getCredential();
    this.sessionCapturingOverride = builder.isSessionCapturingOverrideEnabled();
    this.enableTransportClientSharing = builder.isConnectionSharingAcrossClientsEnabled();
    this.contentResponseOnWriteEnabled = builder.isContentResponseOnWriteEnabled();
    // TRACER is resolved once per class load via ServiceLoader; may be null when no tracer is present.
    this.tracerProvider = new TracerProvider(TRACER);
    this.asyncDocumentClient = new AsyncDocumentClient.Builder()
        .withServiceEndpoint(this.serviceEndpoint)
        .withMasterKeyOrResourceToken(this.keyOrResourceToken)
        .withConnectionPolicy(this.connectionPolicy)
        .withConsistencyLevel(this.desiredConsistencyLevel)
        .withSessionCapturingOverride(this.sessionCapturingOverride)
        .withConfigs(this.configs)
        .withTokenResolver(this.cosmosAuthorizationTokenResolver)
        .withCredential(this.credential)
        .withTransportClientSharing(this.enableTransportClientSharing)
        .withContentResponseOnWriteEnabled(this.contentResponseOnWriteEnabled)
        .build();
}
/**
 * Gets the underlying document client for internal use.
 *
 * @return the wrapped {@link AsyncDocumentClient}.
 */
AsyncDocumentClient getContextClient() {
    return asyncDocumentClient;
}
/**
 * Monitor Cosmos client performance and resource utilization using the specified meter registry.
 *
 * @param registry meter registry to use for performance monitoring.
 */
static void setMonitorTelemetry(MeterRegistry registry) {
    // Registers the registry with the RNTBD transport metrics; affects all clients in the process.
    RntbdMetrics.add(registry);
}
/**
 * Gets the service endpoint this client was configured with.
 *
 * @return the service endpoint URI as a string.
 */
String getServiceEndpoint() {
    return serviceEndpoint;
}
/**
 * Gets the key or resource token used to authenticate with the service.
 *
 * @return the key or resource token.
 */
String getKeyOrResourceToken() {
    return keyOrResourceToken;
}
/**
 * Gets the connection policy this client was configured with.
 *
 * @return the {@link ConnectionPolicy}.
 */
ConnectionPolicy getConnectionPolicy() {
    return connectionPolicy;
}
/**
 * Gets the consistency level requested at client construction.
 *
 * @return the {@link ConsistencyLevel}.
 */
ConsistencyLevel getDesiredConsistencyLevel() {
    return desiredConsistencyLevel;
}
/**
 * Gets the permission list this client was configured with.
 *
 * @return the list of {@link CosmosPermissionProperties}.
 */
List<CosmosPermissionProperties> getPermissions() {
    return permissions;
}
/**
 * Gets the wrapped document client used for all service calls.
 *
 * @return the {@link AsyncDocumentClient}.
 */
AsyncDocumentClient getDocClientWrapper() {
    return this.asyncDocumentClient;
}
/**
 * Gets the internal configuration settings.
 *
 * @return the {@link Configs}.
 */
Configs getConfigs() {
    return configs;
}
/**
 * Gets the authorization token resolver, if one was configured.
 *
 * @return the {@link CosmosAuthorizationTokenResolver}, possibly {@code null}.
 */
CosmosAuthorizationTokenResolver getCosmosAuthorizationTokenResolver() {
    return cosmosAuthorizationTokenResolver;
}
/**
 * Gets the Azure key credential, if one was configured.
 *
 * @return the {@link AzureKeyCredential}, possibly {@code null}.
 */
AzureKeyCredential credential() {
    return credential;
}
/**
 * Gets the boolean which indicates whether to only return the headers and status code in Cosmos DB response
 * in case of Create, Update and Delete operations on CosmosItem.
 *
 * If set to false (which is by default), this removes the resource from response. It reduces networking
 * and CPU load by not sending the resource back over the network and serializing it
 * on the client.
 *
 * By-default, this is false.
 *
 * @return a boolean indicating whether resource will be included in the response or not.
 */
boolean isContentResponseOnWriteEnabled() {
    return contentResponseOnWriteEnabled;
}
/**
 * Creates a database if it does not already exist on the service.
 * <p>
 * On successful completion the {@link Mono} emits a single cosmos database response for the
 * created or already-existing database.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties} describing the database.
 * @return a {@link Mono} containing the response for the created or existing database, or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(CosmosDatabaseProperties databaseProperties) {
    CosmosAsyncDatabase database = getDatabase(databaseProperties.getId());
    return withContext(context -> createDatabaseIfNotExistsInternal(database, null, context));
}
/**
 * Creates a database with the given id if it does not already exist on the service.
 * <p>
 * On successful completion the {@link Mono} emits a single cosmos database response for the
 * created or already-existing database.
 *
 * @param id the id of the database.
 * @return a {@link Mono} containing the response for the created or existing database, or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id) {
    CosmosAsyncDatabase database = getDatabase(id);
    return withContext(context -> createDatabaseIfNotExistsInternal(database, null, context));
}
/**
 * Creates a database with the given id if it does not already exist on the service.
 * <p>
 * The throughput settings are only applied when the database does not yet exist and is
 * therefore created by this call; an existing database keeps its current throughput.
 *
 * @param id the id of the database.
 * @param throughputProperties the throughput to provision when the database is created.
 * @return a {@link Mono} containing the response for the created or existing database, or an error.
 */
public Mono<CosmosDatabaseResponse> createDatabaseIfNotExists(String id, ThroughputProperties throughputProperties) {
    CosmosAsyncDatabase database = getDatabase(id);
    return withContext(context ->
        createDatabaseIfNotExistsInternal(database, throughputProperties, context));
}
/**
 * Creates a database.
 * <p>
 * On subscription the operation is performed; on success the {@link Mono} emits a single
 * resource response for the created database, otherwise it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties} describing the database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null} for defaults.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   CosmosDatabaseRequestOptions options) {
    final CosmosDatabaseRequestOptions effectiveOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    Database databaseResource = new Database();
    databaseResource.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(databaseResource, effectiveOptions, context));
}
/**
 * Creates a database using default request options.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties} describing the database.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties) {
    CosmosDatabaseRequestOptions defaultOptions = new CosmosDatabaseRequestOptions();
    return createDatabase(databaseProperties, defaultOptions);
}
/**
 * Creates a database with the given id using default request options.
 *
 * @param id id of the database.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id) {
    CosmosDatabaseProperties databaseProperties = new CosmosDatabaseProperties(id);
    return createDatabase(databaseProperties, new CosmosDatabaseRequestOptions());
}
/**
 * Creates a database provisioned with the specified throughput.
 * <p>
 * On subscription the operation is performed; on success the {@link Mono} emits a single
 * resource response for the created database, otherwise it errors.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties} describing the database.
 * @param throughputProperties the throughput to provision for the new database.
 * @param options {@link CosmosDatabaseRequestOptions}; may be {@code null} for defaults.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties,
                                                   ThroughputProperties throughputProperties,
                                                   CosmosDatabaseRequestOptions options) {
    // Keep the parameter effectively final: bind the null-check result once instead of
    // reassigning 'options' and then aliasing it into a separate final local.
    final CosmosDatabaseRequestOptions requestOptions =
        options == null ? new CosmosDatabaseRequestOptions() : options;
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    Database wrappedDatabase = new Database();
    wrappedDatabase.setId(databaseProperties.getId());
    return withContext(context -> createDatabaseInternal(wrappedDatabase, requestOptions, context));
}
/**
 * Creates a database provisioned with the specified throughput.
 *
 * @param databaseProperties {@link CosmosDatabaseProperties} describing the database.
 * @param throughputProperties the throughput to provision for the new database.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(CosmosDatabaseProperties databaseProperties, ThroughputProperties throughputProperties) {
    final CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(databaseProperties, requestOptions);
}
/**
 * Creates a database with the given id, provisioned with the specified throughput.
 *
 * @param id the id of the database to create.
 * @param throughputProperties the throughput to provision for the new database.
 * @return a {@link Mono} containing the cosmos database response of the create operation.
 */
public Mono<CosmosDatabaseResponse> createDatabase(String id, ThroughputProperties throughputProperties) {
    final CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
    return createDatabase(new CosmosDatabaseProperties(id), requestOptions);
}
/**
 * Reads all databases in the account.
 * <p>
 * The operation runs on subscription; the returned {@link CosmosPagedFlux} emits one or more
 * feed-response pages of the read databases, or errors on failure.
 *
 * @param options {@link CosmosQueryRequestOptions} controlling paging and continuation.
 * @return a {@link CosmosPagedFlux} of {@link CosmosDatabaseProperties} pages or an error.
 */
CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases(CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(fluxOptions -> {
        // Wire up tracing and propagate continuation-token/page-size settings before issuing the read.
        fluxOptions.setTracerInformation(this.tracerProvider, "readAllDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(fluxOptions, options);
        return getDocClientWrapper()
            .readDatabases(options)
            .map(v2Response -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(v2Response.getResults()),
                v2Response.getResponseHeaders()));
    });
}
/**
 * Reads all databases in the account using default query request options.
 *
 * @return a {@link CosmosPagedFlux} of {@link CosmosDatabaseProperties} pages or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> readAllDatabases() {
    CosmosQueryRequestOptions defaultOptions = new CosmosQueryRequestOptions();
    return readAllDatabases(defaultOptions);
}
/**
 * Queries for databases matching the given SQL query text.
 * <p>
 * The operation runs on subscription; the returned {@link CosmosPagedFlux} emits one or more
 * feed-response pages of the matching databases, or errors on failure.
 *
 * @param query the SQL query text.
 * @param options the feed options; may be {@code null}, in which case defaults are used.
 * @return a {@link CosmosPagedFlux} of {@link CosmosDatabaseProperties} pages or an error.
 */
public CosmosPagedFlux<CosmosDatabaseProperties> queryDatabases(String query, CosmosQueryRequestOptions options) {
    // Keep the parameter effectively final instead of reassigning it; fall back to defaults when absent.
    final CosmosQueryRequestOptions queryOptions =
        options == null ? new CosmosQueryRequestOptions() : options;
    return queryDatabasesInternal(new SqlQuerySpec(query), queryOptions);
}
/**
* Query for databases.
* <p>
* After subscription the operation will be performed.
* The {@link CosmosPagedFlux} will contain one or several feed response of the read databases.
* In case of failure the {@link CosmosPagedFlux} will error.
*
* @param querySpec the SQL query specification.
* @param options the feed options.
* @return a {@link CosmosPagedFlux} containing one or several feed response pages of read databases or an error.
*/
/**
 * Obtains a local {@link CosmosAsyncDatabase} reference; no service call is made.
 *
 * @param id name of the database.
 * @return {@link CosmosAsyncDatabase} bound to this client.
 */
public CosmosAsyncDatabase getDatabase(String id) {
    CosmosAsyncDatabase database = new CosmosAsyncDatabase(id, this);
    return database;
}
/**
 * Close this {@link CosmosAsyncClient} instance and cleans up the resources.
 */
@Override
public void close() {
    // Delegates cleanup to the wrapped document client (connections, pools, etc.).
    asyncDocumentClient.close();
}
/**
 * Gets the tracer provider used for distributed tracing of client operations.
 *
 * @return the {@link TracerProvider}.
 */
TracerProvider getTracerProvider() {
    return tracerProvider;
}
/**
 * Executes a database query and adapts the V2 feed responses to {@link CosmosDatabaseProperties} pages.
 *
 * @param querySpec the SQL query specification.
 * @param options the feed options.
 * @return a {@link CosmosPagedFlux} of matching database pages or an error.
 */
private CosmosPagedFlux<CosmosDatabaseProperties> queryDatabasesInternal(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) {
    return UtilBridgeInternal.createCosmosPagedFlux(fluxOptions -> {
        // Tracing and continuation/page-size propagation happen per subscription.
        fluxOptions.setTracerInformation(this.tracerProvider, "queryDatabases", this.serviceEndpoint, null);
        setContinuationTokenAndMaxItemCount(fluxOptions, options);
        return getDocClientWrapper()
            .queryDatabases(querySpec, options)
            .map(v2Response -> BridgeInternal.createFeedResponse(
                ModelBridgeInternal.getCosmosDatabasePropertiesFromV2Results(v2Response.getResults()),
                v2Response.getResponseHeaders()));
    });
}
/**
 * Reads the database first and, only on a 404 (not found), falls back to creating it.
 * The read and the fallback create share a nested tracing context so both appear under
 * the single "createDatabaseIfNotExists" span; throughput is applied only on the create path.
 */
private Mono<CosmosDatabaseResponse> createDatabaseIfNotExistsInternal(CosmosAsyncDatabase database,
    ThroughputProperties throughputProperties, Context context) {
    String spanName = "createDatabaseIfNotExists." + database.getId();
    // Mark the nested calls so the tracer treats them as children of this operation's span.
    Context nestedContext = context.addData(TracerProvider.COSMOS_CALL_DEPTH, TracerProvider.COSMOS_CALL_DEPTH_VAL);
    Mono<CosmosDatabaseResponse> responseMono = database.readInternal(new CosmosDatabaseRequestOptions(),
        nestedContext).onErrorResume(exception -> {
        // Reactor may wrap the service exception; unwrap before inspecting it.
        final Throwable unwrappedException = Exceptions.unwrap(exception);
        if (unwrappedException instanceof CosmosException) {
            final CosmosException cosmosException = (CosmosException) unwrappedException;
            if (cosmosException.getStatusCode() == HttpConstants.StatusCodes.NOTFOUND) {
                // Database does not exist yet: create it, applying throughput only if supplied.
                CosmosDatabaseRequestOptions requestOptions = new CosmosDatabaseRequestOptions();
                if (throughputProperties != null) {
                    ModelBridgeInternal.setThroughputProperties(requestOptions, throughputProperties);
                }
                Database wrappedDatabase = new Database();
                wrappedDatabase.setId(database.getId());
                return createDatabaseInternal(wrappedDatabase,
                    requestOptions, nestedContext);
            }
        }
        // Any other failure (or non-404 status) is propagated unchanged.
        return Mono.error(unwrappedException);
    });
    return tracerProvider.traceEnabledCosmosResponsePublisher(responseMono,
        context,
        spanName,
        database.getId(),
        this.serviceEndpoint);
}
/**
 * Issues the actual create-database call and wraps it in a tracing span.
 *
 * @param database the wire-level database resource to create.
 * @param options request options for the create call.
 * @param context tracing context of the caller.
 * @return a {@link Mono} emitting the create response.
 */
private Mono<CosmosDatabaseResponse> createDatabaseInternal(Database database, CosmosDatabaseRequestOptions options,
                                                            Context context) {
    String spanName = "createDatabase." + database.getId();
    Mono<CosmosDatabaseResponse> responseMono =
        asyncDocumentClient
            .createDatabase(database, ModelBridgeInternal.toRequestOptions(options))
            .map(ModelBridgeInternal::createCosmosDatabaseResponse)
            .single();
    return tracerProvider.traceEnabledCosmosResponsePublisher(
        responseMono, context, spanName, database.getId(), this.serviceEndpoint);
}
} |
Can we avoid creating a new instance for same type here and also have a way to minimize duplicate instances when used by the user? Maybe have a static map of known types and vend those `TypeReference` instances if one exists or create one if it doesnt? | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
/**
 * Builds an {@link ApacheAvroSerializer} for the given Avro schema string.
 *
 * @param schema the Avro schema JSON.
 * @return a serializer configured with the schema.
 */
private static ApacheAvroSerializer getSerializer(String schema) {
    ApacheAvroSerializerBuilder builder = new ApacheAvroSerializerBuilder().schema(schema);
    return builder.build();
}
// Verifies that Avro-encoded primitive values deserialize to the expected Java values
// for each (payload, schema, target-type) combination from the supplier.
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
    StepVerifier.create(getSerializer(schema).deserialize(avro, type))
        .assertNext(actual -> assertEquals(expected, actual))
        .verifyComplete();
}
// Deserializing an empty stream against the "null" schema completes without emitting a value.
@Test
public void deserializeNull() {
    StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
        new TypeReference<Void>() { })).verifyComplete();
}
// Verifies that single-byte Avro enum indices deserialize to the expected PlayingCardSuit constants.
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
    StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
        new TypeReference<PlayingCardSuit>() { }))
        .assertNext(actual -> assertEquals(expected, actual))
        .verifyComplete();
}
/**
 * Supplies Avro payloads paired with the enum constant they decode to.
 * Avro encodes an enum as its zig-zag-encoded ordinal, so ordinal n is byte 2*n.
 */
private static Stream<Arguments> deserializeEnumSupplier() {
    return Stream.of(
        Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),    // ordinal 0
        Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),    // ordinal 1
        Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),  // ordinal 2
        Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS));    // ordinal 3
}
// An enum index outside the schema's symbol range (here, ordinal 4 of a 4-symbol enum) must error.
@Test
public void deserializeInvalidEnum() {
    StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
        new TypeReference<PlayingCardSuit>() { }))
        .verifyError();
}
// Verifies deserialization of Avro array and map encodings, including multi-block payloads.
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
    StepVerifier.create(getSerializer(schema).deserialize(avro, type))
        .assertNext(actual -> assertEquals(expected, actual))
        .verifyComplete();
}
// Supplies Avro payloads for array/map decoding. Avro collections are encoded as blocks:
// a zig-zag block count, the items, repeated until a 0 count terminates the collection.
private static Stream<Arguments> deserializeListAndMapSupplier() {
    // A map split across two blocks of one entry each (count 2 is zig-zag for 1), ended by 0.
    InputStream multiBlockMapAvro = streamCreator(
        2, 0x06, 0x66, 0x6F, 0x6F, 2,
        2, 0x06, 0x62, 0x61, 0x72, 4, 0
    );
    Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
    expectedMultiBlockMap.put("foo", 1);
    expectedMultiBlockMap.put("bar", 2);
    return Stream.of(
        Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
        Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
            Arrays.asList(10, 20, 30)),
        Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
            Collections.emptyMap()),
        Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
            new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
        Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
    );
}
// Verifies deserialization of Avro records, including nested and recursive record schemas.
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
    StepVerifier.create(getSerializer(schema).deserialize(avro, type))
        .assertNext(actual -> assertEquals(expected, actual))
        .verifyComplete();
}
// Supplies Avro payloads for record decoding: HandOfCards (a record containing an array of
// PlayingCard records) and LongLinkedList (a recursive record). Each card is encoded as
// (isFaceCard boolean, zig-zag cardValue, enum suit ordinal).
private static Stream<Arguments> deserializeRecordSupplier() {
    String handOfCardsSchema = HandOfCards.getClassSchema().toString();
    // Array block of 2 cards (count byte 4 is zig-zag for 2), terminated by 0.
    InputStream pairOfAcesHand = streamCreator(
        4,
        0, 2, 0,
        0, 2, 6,
        0
    );
    HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
        new PlayingCard(false, 1, PlayingCardSuit.SPADES),
        new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
    ));
    // Array block of 5 cards (count byte 10 is zig-zag for 5), terminated by 0.
    InputStream royalFlushHand = streamCreator(
        10,
        0, 20, 0,
        1, 22, 0,
        1, 24, 0,
        1, 26, 0,
        0, 2, 0,
        0
    );
    HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
        new PlayingCard(false, 10, PlayingCardSuit.SPADES),
        new PlayingCard(true, 11, PlayingCardSuit.SPADES),
        new PlayingCard(true, 12, PlayingCardSuit.SPADES),
        new PlayingCard(true, 13, PlayingCardSuit.SPADES),
        new PlayingCard(false, 1, PlayingCardSuit.SPADES)
    ));
    String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
    // Recursive record: (value 0, union branch 1 -> next node (value 1, union branch 0 -> null)).
    InputStream twoNodeLinkedList = streamCreator(
        0, 2,
        2, 0
    );
    LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
    return Stream.of(
        Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
            new HandOfCards(Collections.emptyList())),
        Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
        Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
        Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
            new LongLinkedList(0L, null)),
        Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
            expectedTwoNodeLinkedList)
    );
}
// Passing a null input stream completes empty rather than throwing.
@Test
public void deserializeNullReturnsNull() {
    StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
        .verifyComplete();
}
// The builder rejects a null schema at construction time.
@Test
public void deserializeNullSchemaThrows() {
    assertThrows(NullPointerException.class, () -> getSerializer(null));
}
// Verifies that primitive Java values serialize to the exact expected Avro byte sequences.
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
    StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
        .assertNext(actual -> {
            assertNotNull(actual);
            assertArrayEquals(expected, actual.toByteArray());
        })
        .verifyComplete();
}
// Supplies (schema, value, expected bytes) triples covering Avro primitive encodings:
// null is zero bytes, booleans are one byte, ints/longs are zig-zag varints (21 -> 0x2A),
// floats/doubles are little-endian IEEE 754, strings/bytes are length-prefixed.
private static Stream<Arguments> simpleSerializationSupplier() {
    return Stream.of(
        Arguments.of(schemaCreator("null"), null, new byte[0]),
        Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
        Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
        Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
        Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
        Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
        Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
        Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
        Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
        Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
        Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
    );
}
// Verifies that enum constants serialize to their single-byte zig-zag ordinal encodings.
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
    StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
        .serialize(new ByteArrayOutputStream(), playingCardSuit))
        .assertNext(actual -> {
            assertNotNull(actual);
            assertArrayEquals(expected, actual.toByteArray());
        })
        .verifyComplete();
}
/**
 * Supplies enum constants paired with their expected Avro encodings.
 * Avro writes an enum as its zig-zag-encoded ordinal: ordinal n becomes byte 2*n.
 */
private static Stream<Arguments> serializeEnumSupplier() {
    return Stream.of(
        Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),    // ordinal 0
        Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),    // ordinal 1
        Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),  // ordinal 2
        Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 }));    // ordinal 3
}
// Verifies serialization of Java lists and maps to Avro array/map block encodings.
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
    StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
        .assertNext(actual -> {
            assertNotNull(actual);
            assertArrayEquals(expected, actual.toByteArray());
        })
        .verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
Basically create a backing map of static references for common built-in Java types such as `Boolean`, `Integer`, `Map<String, Object>`, etc? | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
Yeah, basically, support these built-in types out of the box and for custom types, lazily add to the backing map. | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
The best I can think of would be an API such as this. ```java <T> TypeReference<T> createInstance(Class<T> clazz); ``` With a static cache of `Map<Class<T>, TypeReference<T>>`. This won't be able to handle `ParameterizedType`. Is this something we would need to GA this feature or could we add it later? | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
Adding this after GA would lead to 2 different ways of creating an instance of `TypeReference` - ctor and static method. So, we have to finalize this before GA. | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
I would stick with using constructor as this matches patterns used in other similar concepts. | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true), | private static Stream<Arguments> deserializePrimitiveTypesSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), schemaCreator("boolean"), new TypeReference<Boolean>() { }, false),
Arguments.of(streamCreator(1), schemaCreator("boolean"), new TypeReference<Boolean>() { }, true),
Arguments.of(streamCreator(42), schemaCreator("int"), new TypeReference<Integer>() { }, 21),
Arguments.of(streamCreator(42), schemaCreator("long"), new TypeReference<Long>() { }, 21L),
Arguments.of(streamCreator(0x00, 0x00, 0x28, 0x42), schemaCreator("float"), new TypeReference<Float>() { }, 42F),
Arguments.of(streamCreator(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40), schemaCreator("double"),
new TypeReference<Double>() { }, 42D),
Arguments.of(streamCreator(0), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, ""),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_STRING_SCHEMA, new TypeReference<String>() { }, "foo"),
Arguments.of(streamCreator(0), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { }, new Utf8("")),
Arguments.of(streamCreator(0x06, 0x66, 0x6F, 0x6F), SPECIFIED_CHAR_SEQUENCE_SCHEMA, new TypeReference<Utf8>() { },
new Utf8("foo")),
Arguments.of(streamCreator(0), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[0])),
Arguments.of(streamCreator(4, 42, 42), schemaCreator("bytes"), new TypeReference<ByteBuffer>() { },
ByteBuffer.wrap(new byte[] {42, 42 }))
);
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} | class ApacheAvroSerializerTests {
/*
* This Avro schema specifies the Java string type that should be used to deserialize STRING. Without specifying
* 'String' the default is 'CharSequence' which ends up being wrapped in Apache's 'Utf8' class. Additionally, this
* can be set as a compile configuration.
*/
private static final String SPECIFIED_STRING_SCHEMA = "{\"type\": \"string\",\"avro.java.string\":\"String\"}";
private static final String SPECIFIED_CHAR_SEQUENCE_SCHEMA = "{\"type\": \"string\","
+ "\"avro.java.string\":\"CharSequence\"}";
private static final String INT_ARRAY_SCHEMA = "{\"type\":\"array\",\"items\":\"int\"}";
private static final String INT_MAP_SCHEMA = "{\"type\":\"map\",\"values\":\"int\","
+ "\"avro.java.string\":\"String\"}";
private static ApacheAvroSerializer getSerializer(String schema) {
return new ApacheAvroSerializerBuilder()
.schema(schema)
.build();
}
@ParameterizedTest
@MethodSource("deserializePrimitiveTypesSupplier")
public <T> void deserializePrimitiveTypes(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
@Test
public void deserializeNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(new ByteArrayInputStream(new byte[0]),
new TypeReference<Void>() { })).verifyComplete();
}
@ParameterizedTest
@MethodSource("deserializeEnumSupplier")
public void deserializeEnum(InputStream avro, PlayingCardSuit expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(avro,
new TypeReference<PlayingCardSuit>() { }))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeEnumSupplier() {
return Stream.of(
Arguments.of(streamCreator(0), PlayingCardSuit.SPADES),
Arguments.of(streamCreator(2), PlayingCardSuit.HEARTS),
Arguments.of(streamCreator(4), PlayingCardSuit.DIAMONDS),
Arguments.of(streamCreator(6), PlayingCardSuit.CLUBS)
);
}
@Test
public void deserializeInvalidEnum() {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString()).deserialize(streamCreator(8),
new TypeReference<PlayingCardSuit>() { }))
.verifyError();
}
@ParameterizedTest
@MethodSource("deserializeListAndMapSupplier")
public <T> void deserializeListAndMap(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeListAndMapSupplier() {
InputStream multiBlockMapAvro = streamCreator(
2, 0x06, 0x66, 0x6F, 0x6F, 2,
2, 0x06, 0x62, 0x61, 0x72, 4, 0
);
Map<String, Integer> expectedMultiBlockMap = new HashMap<>();
expectedMultiBlockMap.put("foo", 1);
expectedMultiBlockMap.put("bar", 2);
return Stream.of(
Arguments.of(streamCreator(0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { }, Collections.emptyList()),
Arguments.of(streamCreator(6, 20, 40, 60, 0), INT_ARRAY_SCHEMA, new TypeReference<List<Integer>>() { },
Arrays.asList(10, 20, 30)),
Arguments.of(streamCreator(0), INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { },
Collections.emptyMap()),
Arguments.of(streamCreator(2, 0x06, 0x66, 0x6F, 0x6F, 2, 0), INT_MAP_SCHEMA,
new TypeReference<Map<String, Integer>>() { }, Collections.singletonMap("foo", 1)),
Arguments.of(multiBlockMapAvro, INT_MAP_SCHEMA, new TypeReference<Map<String, Integer>>() { }, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("deserializeRecordSupplier")
public <T> void deserializeRecord(InputStream avro, String schema, TypeReference<T> type, T expected) {
StepVerifier.create(getSerializer(schema).deserialize(avro, type))
.assertNext(actual -> assertEquals(expected, actual))
.verifyComplete();
}
private static Stream<Arguments> deserializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
InputStream pairOfAcesHand = streamCreator(
4,
0, 2, 0,
0, 2, 6,
0
);
HandOfCards expectedPairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
InputStream royalFlushHand = streamCreator(
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
);
HandOfCards expectedRoyalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
InputStream twoNodeLinkedList = streamCreator(
0, 2,
2, 0
);
LongLinkedList expectedTwoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
return Stream.of(
Arguments.of(streamCreator(0), handOfCardsSchema, new TypeReference<HandOfCards>() { },
new HandOfCards(Collections.emptyList())),
Arguments.of(pairOfAcesHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedPairOfAces),
Arguments.of(royalFlushHand, handOfCardsSchema, new TypeReference<HandOfCards>() { }, expectedRoyalFlushHand),
Arguments.of(streamCreator(0, 0), longLinkedListSchema, new TypeReference<LongLinkedList>() { },
new LongLinkedList(0L, null)),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, new TypeReference<LongLinkedList>() { },
expectedTwoNodeLinkedList)
);
}
@Test
public void deserializeNullReturnsNull() {
StepVerifier.create(getSerializer(schemaCreator("null")).deserialize(null, new TypeReference<Void>() { }))
.verifyComplete();
}
@Test
public void deserializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
@ParameterizedTest
@MethodSource("simpleSerializationSupplier")
public void simpleSerialization(String schema, Object value, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), value))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> simpleSerializationSupplier() {
return Stream.of(
Arguments.of(schemaCreator("null"), null, new byte[0]),
Arguments.of(schemaCreator("boolean"), false, new byte[] { 0 }),
Arguments.of(schemaCreator("boolean"), true, new byte[] { 1 }),
Arguments.of(schemaCreator("int"), 21, new byte[] { 42 }),
Arguments.of(schemaCreator("long"), 21L, new byte[] { 42 }),
Arguments.of(schemaCreator("float"), 42F, new byte[] { 0x00, 0x00, 0x28, 0x42}),
Arguments.of(schemaCreator("double"), 42D, new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x45, 0x40 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "", new byte[] { 0 }),
Arguments.of(SPECIFIED_STRING_SCHEMA, "foo", new byte[] { 0x06, 0x66, 0x6F, 0x6F }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[0]), new byte[] { 0 }),
Arguments.of(schemaCreator("bytes"), ByteBuffer.wrap(new byte[] { 42, 42 }), new byte[] { 4, 42, 42 })
);
}
@ParameterizedTest
@MethodSource("serializeEnumSupplier")
public void serializeEnum(PlayingCardSuit playingCardSuit, byte[] expected) {
StepVerifier.create(getSerializer(PlayingCardSuit.getClassSchema().toString())
.serialize(new ByteArrayOutputStream(), playingCardSuit))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeEnumSupplier() {
return Stream.of(
Arguments.of(PlayingCardSuit.SPADES, new byte[] { 0 }),
Arguments.of(PlayingCardSuit.HEARTS, new byte[] { 2 }),
Arguments.of(PlayingCardSuit.DIAMONDS, new byte[] { 4 }),
Arguments.of(PlayingCardSuit.CLUBS, new byte[] { 6 })
);
}
@ParameterizedTest
@MethodSource("serializeListAndMapSupplier")
public void serializeListAndMap(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeListAndMapSupplier() {
Map<String, Integer> multiBlockMap = new HashMap<>();
multiBlockMap.put("foo", 1);
multiBlockMap.put("bar", 2);
byte[] expectedMultiBlockMap = new byte[] {
4, 0x06, 0x62, 0x61, 0x72, 4,
0x06, 0x66, 0x6F, 0x6F, 2, 0
};
return Stream.of(
Arguments.of(Collections.emptyList(), INT_ARRAY_SCHEMA, new byte[] { 0 }),
Arguments.of(Arrays.asList(10, 20, 30), INT_ARRAY_SCHEMA, new byte[] { 6, 20, 40, 60, 0 }),
Arguments.of(Collections.emptyMap(), INT_MAP_SCHEMA, new byte[] { 0 }),
Arguments.of(Collections.singletonMap("foo", 1), INT_MAP_SCHEMA,
new byte[] { 2, 0x06, 0x66, 0x6F, 0x6F, 2, 0 }),
Arguments.of(multiBlockMap, INT_MAP_SCHEMA, expectedMultiBlockMap)
);
}
@ParameterizedTest
@MethodSource("serializeRecordSupplier")
public void serializeRecord(Object obj, String schema, byte[] expected) {
StepVerifier.create(getSerializer(schema).serialize(new ByteArrayOutputStream(), obj))
.assertNext(actual -> {
assertNotNull(actual);
assertArrayEquals(expected, actual.toByteArray());
})
.verifyComplete();
}
private static Stream<Arguments> serializeRecordSupplier() {
String handOfCardsSchema = HandOfCards.getClassSchema().toString();
HandOfCards pairOfAces = new HandOfCards(Arrays.asList(
new PlayingCard(false, 1, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.CLUBS)
));
byte[] expectedPairOfAcesAvro = new byte[] {
4,
0, 2, 0,
0, 2, 6,
0
};
HandOfCards royalFlushHand = new HandOfCards(Arrays.asList(
new PlayingCard(false, 10, PlayingCardSuit.SPADES),
new PlayingCard(true, 11, PlayingCardSuit.SPADES),
new PlayingCard(true, 12, PlayingCardSuit.SPADES),
new PlayingCard(true, 13, PlayingCardSuit.SPADES),
new PlayingCard(false, 1, PlayingCardSuit.SPADES)
));
byte[] expectedRoyalFlushHandAvro = new byte[] {
10,
0, 20, 0,
1, 22, 0,
1, 24, 0,
1, 26, 0,
0, 2, 0,
0
};
String longLinkedListSchema = LongLinkedList.getClassSchema().toString();
LongLinkedList twoNodeLinkedList = new LongLinkedList(0L, new LongLinkedList(1L, null));
byte[] expectedTwoNodeLinkedListAvro = new byte[] {
0, 2,
2, 0
};
return Stream.of(
Arguments.of(new HandOfCards(Collections.emptyList()), handOfCardsSchema, new byte[] { 0 }),
Arguments.of(pairOfAces, handOfCardsSchema, expectedPairOfAcesAvro),
Arguments.of(royalFlushHand, handOfCardsSchema, expectedRoyalFlushHandAvro),
Arguments.of(new LongLinkedList(0L, null), longLinkedListSchema, new byte[] { 0, 0 }),
Arguments.of(twoNodeLinkedList, longLinkedListSchema, expectedTwoNodeLinkedListAvro)
);
}
@Test
public void serializeNullSchemaThrows() {
assertThrows(NullPointerException.class, () -> getSerializer(null));
}
private static String schemaCreator(String type) {
return String.format("{\"type\" : \"%s\"}", type);
}
private static InputStream streamCreator(int... bytes) {
byte[] converted = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
converted[i] = (byte) bytes[i];
}
return new ByteArrayInputStream(converted);
}
} |
Do we need mayHaveBody check , what happen if we clear on all cancel irrespective of body, we can avoid extra check ? | private void releaseAfterCancel(HttpMethod method) {
if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) { | private void releaseAfterCancel(HttpMethod method) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
private boolean mayHaveBody(HttpMethod method) {
int code = this.statusCode();
return !((code >= 100 && code < 200) || code == 204 || code == 205 ||
method.equals(HttpMethod.HEAD) || headers().getContentLength() == 0);
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
} |
Since we are draining content here, we want to make sure we drain it under very specific conditions, specially when the body can be present. | private void releaseAfterCancel(HttpMethod method) {
if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) { | private void releaseAfterCancel(HttpMethod method) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
private boolean mayHaveBody(HttpMethod method) {
int code = this.statusCode();
return !((code >= 100 && code < 200) || code == 204 || code == 205 ||
method.equals(HttpMethod.HEAD) || headers().getContentLength() == 0);
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
} |
That is fine ,but my doubt is if some valid response miss mayHaveBody (due to any missed scenario), then we will still face issue , vs draining non body too along with body response (Its a trade off thing ) | private void releaseAfterCancel(HttpMethod method) {
if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | if (mayHaveBody(method) && this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) { | private void releaseAfterCancel(HttpMethod method) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.CANCELLED)) {
if (logger.isDebugEnabled()) {
logger.debug("Releasing body, not yet subscribed");
}
this.bodyIntern()
.doOnNext(byteBuf -> {})
.subscribe(byteBuf -> {}, ex -> {});
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
private boolean mayHaveBody(HttpMethod method) {
int code = this.statusCode();
return !((code >= 100 && code < 200) || code == 204 || code == 205 ||
method.equals(HttpMethod.HEAD) || headers().getContentLength() == 0);
}
} | class ReactorNettyHttpResponse extends HttpResponse {
private final AtomicReference<ReactorNettyResponseState> state = new AtomicReference<>(ReactorNettyResponseState.NOT_SUBSCRIBED);
private final HttpClientResponse reactorNettyResponse;
private final Connection reactorNettyConnection;
ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection) {
this.reactorNettyResponse = reactorNettyResponse;
this.reactorNettyConnection = reactorNettyConnection;
}
@Override
public int statusCode() {
return reactorNettyResponse.status().code();
}
@Override
public String headerValue(String name) {
return reactorNettyResponse.responseHeaders().get(name);
}
@Override
public HttpHeaders headers() {
HttpHeaders headers = new HttpHeaders(reactorNettyResponse.responseHeaders().size());
reactorNettyResponse.responseHeaders().forEach(e -> headers.set(e.getKey(), e.getValue()));
return headers;
}
@Override
public Flux<ByteBuf> body() {
return bodyIntern()
.doOnSubscribe(this::updateSubscriptionState)
.map(byteBuf -> {
byteBuf.retain();
return byteBuf;
});
}
@Override
public Mono<byte[]> bodyAsByteArray() {
return bodyIntern().aggregate()
.asByteArray()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString() {
return bodyIntern().aggregate()
.asString()
.doOnSubscribe(this::updateSubscriptionState);
}
@Override
public Mono<String> bodyAsString(Charset charset) {
return bodyIntern().aggregate()
.asString(charset)
.doOnSubscribe(this::updateSubscriptionState);
}
private ByteBufFlux bodyIntern() {
return reactorNettyConnection.inbound().receive();
}
@Override
Connection internConnection() {
return reactorNettyConnection;
}
private void updateSubscriptionState(Subscription subscription) {
if (this.state.compareAndSet(ReactorNettyResponseState.NOT_SUBSCRIBED, ReactorNettyResponseState.SUBSCRIBED)) {
return;
}
if (this.state.get() == ReactorNettyResponseState.CANCELLED) {
throw new IllegalStateException(
"The client response body has been released already due to cancellation.");
}
}
/**
* Called by {@link ReactorNettyClient} when a cancellation is detected
* but the content has not been subscribed to. If the subscription never
* materializes then the content will remain not drained. Or it could still
* materialize if the cancellation happened very early, or the response
* reading was delayed for some reason.
*/
} |
Does GSON maintain full method names or does it drop Java bean notation? I know in Jackson's case this would serialize as `hotelName` (possibly `HotelName` as I'm not completely certain about how it handles casing). | public void testPropertyNameOnMethodName() throws NoSuchMethodException {
class Hotel {
String hotelName;
public String getHotelName() {
return hotelName;
}
}
Method m = Hotel.class.getDeclaredMethod("getHotelName");
assertMemberValue(m, "getHotelName");
} | assertMemberValue(m, "getHotelName"); | public void testPropertyNameOnMethodName() throws NoSuchMethodException {
class LocalHotel {
String hotelName;
public String getHotelName() {
return hotelName;
}
}
Method m = LocalHotel.class.getDeclaredMethod("getHotelName");
assertNull(serializer.convertMemberName(m));
} | class Hotel {
@SerializedName(value = "")
String hotelName;
} | class LocalHotel {
@SerializedName(value = "")
String hotelName;
} |
This isn't how Jackson handles a `JsonProperty`-annotated method by default; it will attempt to remove the JavaBean prefix of `get` or `is`. | public String convertMemberName(Member member) {
// Fields: @JsonIgnore hides the member entirely; @JsonProperty renames it
// unless its value is empty, in which case the declared field name is kept.
if (member instanceof Field) {
Field f = (Field) member;
if (f.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (f.isAnnotationPresent(JsonProperty.class)) {
String propertyName = f.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? f.getName() : propertyName;
}
}
// Methods: same @JsonIgnore / @JsonProperty handling as fields.
// NOTE(review): an unannotated or empty-@JsonProperty method keeps its full
// method name (e.g. `getHotelName`); Jackson's default bean introspection
// would strip the JavaBean `get`/`is` prefix — confirm this difference is
// intended.
if (member instanceof Method) {
Method m = (Method) member;
if (m.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (m.isAnnotationPresent(JsonProperty.class)) {
String propertyName = m.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? m.getName() : propertyName;
}
}
// Any other member kind, or an unannotated field/method, falls through to
// its declared name.
return member.getName();
} | return member.getName(); | public String convertMemberName(Member member) {
// Transient members never take part in serialization.
if (Modifier.isTransient(member.getModifiers())) {
return null;
}
// Fields: @JsonIgnore hides the member; a non-empty @JsonProperty value
// renames it; otherwise the declared field name is used.
if (member instanceof Field) {
Field f = (Field) member;
if (f.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (f.isAnnotationPresent(JsonProperty.class)) {
String propertyName = f.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? f.getName() : propertyName;
}
return member.getName();
}
// Methods: same annotation handling as fields.
// NOTE(review): when @JsonProperty has no value the method name is used
// verbatim — Jackson's default bean introspection would strip the `get`/`is`
// prefix instead; confirm this is the intended contract.
if (member instanceof Method) {
Method m = (Method) member;
if (m.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (m.isAnnotationPresent(JsonProperty.class)) {
String propertyName = m.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? m.getName() : propertyName;
}
return member.getName();
}
// Constructors and any other member kinds are not serializable members.
return null;
} | class JacksonJsonSerializer implements MemberNameConverter, JsonSerializer {
// Logger scoped to this serializer implementation.
private final ClientLogger logger = new ClientLogger(JacksonJsonSerializer.class);
// Pre-configured Jackson mapper performing all (de)serialization work.
private final ObjectMapper mapper;
// Cached type factory used to translate TypeReference descriptions into
// Jackson JavaType instances.
private final TypeFactory typeFactory;
/**
 * Constructs a {@link JsonSerializer} using the passed Jackson serializer.
 *
 * @param mapper Configured Jackson serializer.
 */
JacksonJsonSerializer(ObjectMapper mapper) {
this.mapper = mapper;
this.typeFactory = mapper.getTypeFactory();
}
/**
 * Deserializes the stream into the requested type.
 *
 * @param stream JSON content; may be {@code null}, in which case {@code null}
 * is returned without touching the mapper.
 * @param typeReference description of the target type.
 * @param <T> target type.
 * @return the deserialized value, or {@code null} when {@code stream} is null.
 */
@Override
public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) {
if (stream == null) {
return null;
}
try {
return mapper.readValue(stream, typeFactory.constructType(typeReference.getJavaType()));
} catch (IOException ex) {
// Surface Jackson's checked IOException as an unchecked, logged error.
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
/**
 * Async variant of {@link #deserialize(InputStream, TypeReference)}; the work
 * is deferred until subscription.
 */
@Override
public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) {
return Mono.fromCallable(() -> deserialize(stream, typeReference));
}
/**
 * Serializes {@code value} as JSON onto {@code stream} and returns the same
 * stream for chaining.
 */
@Override
public <S extends OutputStream> S serialize(S stream, Object value) {
try {
mapper.writeValue(stream, value);
} catch (IOException ex) {
// Surface Jackson's checked IOException as an unchecked, logged error.
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
return stream;
}
/**
 * Async variant of the serialize method; the work is deferred until
 * subscription.
 */
@Override
public <S extends OutputStream> Mono<S> serializeAsync(S stream, Object value) {
return Mono.fromCallable(() -> serialize(stream, value));
}
@Override
} | class JacksonJsonSerializer implements MemberNameConverter, JsonSerializer {
private final ClientLogger logger = new ClientLogger(JacksonJsonSerializer.class);
private final ObjectMapper mapper;
private final TypeFactory typeFactory;
/**
* Constructs a {@link JsonSerializer} using the passed Jackson serializer.
*
* @param mapper Configured Jackson serializer.
*/
JacksonJsonSerializer(ObjectMapper mapper) {
this.mapper = mapper;
this.typeFactory = mapper.getTypeFactory();
}
@Override
public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) {
if (stream == null) {
return null;
}
try {
return mapper.readValue(stream, typeFactory.constructType(typeReference.getJavaType()));
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
@Override
public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) {
return Mono.fromCallable(() -> deserialize(stream, typeReference));
}
@Override
public void serialize(OutputStream stream, Object value) {
try {
mapper.writeValue(stream, value);
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
@Override
public Mono<Void> serializeAsync(OutputStream stream, Object value) {
return Mono.fromRunnable(() -> serialize(stream, value));
}
@Override
} |
Jackson can serialize a property through a getter even when the getter is not named in the `get{PropertyName}` format; JavaBeans introspection cannot read such getters. To match what Jackson does we would have to introduce really complicated logic in core, which also adds risk to core. As discussed offline, we can defer this functionality to a future release, or host the logic inside the client SDK library first. | public String convertMemberName(Member member) {
if (member instanceof Field) {
Field f = (Field) member;
if (f.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (f.isAnnotationPresent(JsonProperty.class)) {
String propertyName = f.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? f.getName() : propertyName;
}
}
if (member instanceof Method) {
Method m = (Method) member;
if (m.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (m.isAnnotationPresent(JsonProperty.class)) {
String propertyName = m.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? m.getName() : propertyName;
}
}
return member.getName();
} | return member.getName(); | public String convertMemberName(Member member) {
if (Modifier.isTransient(member.getModifiers())) {
return null;
}
if (member instanceof Field) {
Field f = (Field) member;
if (f.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (f.isAnnotationPresent(JsonProperty.class)) {
String propertyName = f.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? f.getName() : propertyName;
}
return member.getName();
}
if (member instanceof Method) {
Method m = (Method) member;
if (m.isAnnotationPresent(JsonIgnore.class)) {
return null;
}
if (m.isAnnotationPresent(JsonProperty.class)) {
String propertyName = m.getDeclaredAnnotation(JsonProperty.class).value();
return CoreUtils.isNullOrEmpty(propertyName) ? m.getName() : propertyName;
}
return member.getName();
}
return null;
} | class JacksonJsonSerializer implements MemberNameConverter, JsonSerializer {
private final ClientLogger logger = new ClientLogger(JacksonJsonSerializer.class);
private final ObjectMapper mapper;
private final TypeFactory typeFactory;
/**
* Constructs a {@link JsonSerializer} using the passed Jackson serializer.
*
* @param mapper Configured Jackson serializer.
*/
JacksonJsonSerializer(ObjectMapper mapper) {
this.mapper = mapper;
this.typeFactory = mapper.getTypeFactory();
}
@Override
public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) {
if (stream == null) {
return null;
}
try {
return mapper.readValue(stream, typeFactory.constructType(typeReference.getJavaType()));
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
@Override
public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) {
return Mono.fromCallable(() -> deserialize(stream, typeReference));
}
@Override
public <S extends OutputStream> S serialize(S stream, Object value) {
try {
mapper.writeValue(stream, value);
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
return stream;
}
@Override
public <S extends OutputStream> Mono<S> serializeAsync(S stream, Object value) {
return Mono.fromCallable(() -> serialize(stream, value));
}
@Override
} | class JacksonJsonSerializer implements MemberNameConverter, JsonSerializer {
private final ClientLogger logger = new ClientLogger(JacksonJsonSerializer.class);
private final ObjectMapper mapper;
private final TypeFactory typeFactory;
/**
* Constructs a {@link JsonSerializer} using the passed Jackson serializer.
*
* @param mapper Configured Jackson serializer.
*/
JacksonJsonSerializer(ObjectMapper mapper) {
this.mapper = mapper;
this.typeFactory = mapper.getTypeFactory();
}
@Override
public <T> T deserialize(InputStream stream, TypeReference<T> typeReference) {
if (stream == null) {
return null;
}
try {
return mapper.readValue(stream, typeFactory.constructType(typeReference.getJavaType()));
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
@Override
public <T> Mono<T> deserializeAsync(InputStream stream, TypeReference<T> typeReference) {
return Mono.fromCallable(() -> deserialize(stream, typeReference));
}
@Override
public void serialize(OutputStream stream, Object value) {
try {
mapper.writeValue(stream, value);
} catch (IOException ex) {
throw logger.logExceptionAsError(new UncheckedIOException(ex));
}
}
@Override
public Mono<Void> serializeAsync(OutputStream stream, Object value) {
return Mono.fromRunnable(() -> serialize(stream, value));
}
@Override
} |
Should there be validation to check if it is in the correct format? | public void supplementalResponseStatisticsList() throws Exception {
// Record 15 responses: the internal list keeps all of them, but the
// serialized diagnostics are expected to emit only the last 10.
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
// Reset via reflection and verify the list is empty again.
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
// With only 7 recorded responses (below the cap) all of them should appear
// in the serialized output.
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
// Every emitted entry must carry the expected diagnostic fields.
for(JsonNode node : supplementalResponseStatisticsListNode) {
assertThat(node.get("storeResult")).isNotNull();
assertThat(node.get("requestResponseTimeUTC")).isNotNull();
assertThat(node.get("requestOperationType")).isNotNull();
// NOTE(review): this assertion duplicates the line above verbatim — one of
// them was probably meant to check a different field (e.g. resourceType).
assertThat(node.get("requestOperationType")).isNotNull();
}
} | assertThat(node.get("requestResponseTimeUTC")).isNotNull(); | public void supplementalResponseStatisticsList() throws Exception {
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
for(JsonNode node : supplementalResponseStatisticsListNode) {
assertThat(node.get("storeResult").asText()).isNotNull();
String requestResponseTimeUTC = node.get("requestResponseTimeUTC").asText();
String formattedInstant = RESPONSE_TIME_FORMATTER.format(Instant.now());
String[] requestResponseTimeUTCList = requestResponseTimeUTC.split(" ");
String[] formattedInstantList = formattedInstant.split(" ");
assertThat(requestResponseTimeUTC.length()).isEqualTo(formattedInstant.length());
assertThat(requestResponseTimeUTCList.length).isEqualTo(formattedInstantList.length);
assertThat(requestResponseTimeUTCList[0]).isEqualTo(formattedInstantList[0]);
assertThat(requestResponseTimeUTCList[1]).isEqualTo(formattedInstantList[1]);
assertThat(requestResponseTimeUTCList[2]).isEqualTo(formattedInstantList[2]);
assertThat(node.get("requestResponseTimeUTC")).isNotNull();
assertThat(node.get("requestOperationType")).isNotNull();
assertThat(node.get("requestOperationType")).isNotNull();
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
/**
 * Verifies that each client operation surfaces the expected serialization
 * diagnostic event (database/container/item deserialization and partition-key
 * fetch serialization) in its diagnostics string.
 */
// Fix: the @Test annotation was duplicated; TestNG's @Test is not a
// repeatable annotation, so two copies on one method do not compile.
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
    // Database read surfaces a DATABASE_DESERIALIZATION event.
    CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
    String diagnostics = cosmosDatabase.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
    // Container read surfaces a CONTAINER_DESERIALIZATION event.
    CosmosContainerResponse containerResponse = this.container.read();
    diagnostics = containerResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
    // Creating an item without an explicit partition key forces the SDK to
    // serialize the document to extract it.
    TestItem testItem = new TestItem();
    testItem.id = "TestId";
    testItem.mypk = "TestPk";
    CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    // Supplying the partition key explicitly avoids both the fetch
    // serialization and any eager item deserialization.
    testItem.id = "TestId2";
    testItem.mypk = "TestPk";
    itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // Accessing the typed item lazily triggers ITEM_DESERIALIZATION.
    TestItem readTestItem = itemResponse.getItem();
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
    InternalObjectNode properties = readItemResponse.getItem();
    diagnostics = readItemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
/**
 * Builds a minimal test document with a random id and the fixed partition
 * key value "test".
 */
private InternalObjectNode getInternalObjectNode() {
    InternalObjectNode node = new InternalObjectNode();
    node.setId(UUID.randomUUID().toString());
    BridgeInternal.setProperty(node, "mypk", "test");
    return node;
}
// Reads the private supplementalResponseStatisticsList field via reflection,
// since the raw (uncapped) list is not exposed through the public API.
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
@SuppressWarnings({"unchecked"})
List<ClientSideRequestStatistics.StoreResponseStatistics> list
= (List<ClientSideRequestStatistics.StoreResponseStatistics>) storeResponseStatisticsField.get(requestStatistics);
return list;
}
// Resets the private supplementalResponseStatisticsList to an empty list via
// reflection, so a single statistics instance can be reused across scenarios.
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
storeResponseStatisticsField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
/**
 * Asserts that the serialized diagnostics for a gateway-mode call contain
 * each expected transport timeline event.
 *
 * @param diagnostics the diagnostics JSON string emitted by the operation.
 */
private void validateTransportRequestTimelineGateway(String diagnostics) {
    // Fix: the "connectionConfigured" assertion was duplicated verbatim; one
    // copy is sufficient.
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
// Asserts that the serialized diagnostics for a direct-mode call contain the
// full request lifecycle: created -> queued -> pipelined -> transitTime ->
// received -> completed.
private void validateTransportRequestTimelineDirect(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"created\"");
assertThat(diagnostics).contains("\"eventName\":\"queued\"");
assertThat(diagnostics).contains("\"eventName\":\"pipelined\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
assertThat(diagnostics).contains("\"eventName\":\"completed\"");
}
// Fails the test when the diagnostics string is not well-formed JSON;
// parsing success is the only criterion, the parsed tree is discarded.
private void validateJson(String jsonInString) {
try {
OBJECT_MAPPER.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
}
// Simple serializable item with an id and a partition-key field ("mypk"),
// used as the payload for the serialization-scenario tests. The public
// no-arg constructor is required for Jackson deserialization.
public static class TestItem {
public String id;
public String mypk;
public TestItem() {
}
}
} | class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final DateTimeFormatter RESPONSE_TIME_FORMATTER =
DateTimeFormatter.ofPattern("dd MMM yyyy HH:mm:ss" + ".SSS").withLocale(Locale.US).withZone(ZoneOffset.UTC);
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
    // Reading an existing item with the wrong partition key must fail with 404 and the
    // resulting CosmosException must still carry DIRECT-mode diagnostics.
    CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
    InternalObjectNode internalObjectNode = getInternalObjectNode();
    CosmosClient client = null;
    try {
        client = new CosmosClientBuilder()
            .endpoint(TestConfigurations.HOST)
            .key(TestConfigurations.MASTER_KEY)
            .contentResponseOnWriteEnabled(true)
            .directMode()
            .buildClient();
        CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
        CosmosItemResponse<InternalObjectNode> createResponse = container.createItem(internalObjectNode);
        // Deliberately read with a partition key that does not match the created document.
        // (The original built a CosmosItemRequestOptions that was never passed to readItem;
        // that dead code has been removed.)
        cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
            new PartitionKey("wrongPartitionKey"),
            InternalObjectNode.class);
        fail("request should fail as partition key is wrong");
    } catch (CosmosException exception) {
        String diagnostics = exception.getDiagnostics().toString();
        assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
        assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
        assertThat(exception.getDiagnostics().getDuration()).isNotNull();
        validateJson(diagnostics);
    } finally {
        // Close the ad-hoc client even when assertions above throw.
        if (client != null) {
            client.close();
        }
    }
}
// NOTE(review): the source had "@Test(groups = {"simple"})" written twice on consecutive
// lines; @Test is not a repeatable annotation, so the duplicate is a compile error.
// Exactly one annotation is kept.
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
    // Database read -> DATABASE_DESERIALIZATION must be recorded.
    CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
    String diagnostics = cosmosDatabase.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
    // Container read -> CONTAINER_DESERIALIZATION.
    CosmosContainerResponse containerResponse = this.container.read();
    diagnostics = containerResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
    TestItem testItem = new TestItem();
    testItem.id = "TestId";
    testItem.mypk = "TestPk";
    // Create without an explicit partition key -> the SDK must fetch it from the document.
    CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    testItem.id = "TestId2";
    testItem.mypk = "TestPk";
    // Create with an explicit partition key -> no PK-fetch serialization, and no item
    // deserialization has happened yet.
    itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // getItem() is the call that triggers item deserialization; the local is intentionally unused.
    TestItem readTestItem = itemResponse.getItem();
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
    // Same: getItem() on a read response records ITEM_DESERIALIZATION.
    InternalObjectNode properties = readItemResponse.getItem();
    diagnostics = readItemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
/**
 * Builds a minimal test document: a random id plus the fixed partition-key
 * property "mypk" set to "test".
 */
private InternalObjectNode getInternalObjectNode() {
    InternalObjectNode node = new InternalObjectNode();
    node.setId(UUID.randomUUID().toString());
    BridgeInternal.setProperty(node, "mypk", "test");
    return node;
}
/**
 * Reflectively reads the private {@code supplementalResponseStatisticsList} field,
 * which has no public accessor.
 */
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field field = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    field.setAccessible(true);
    @SuppressWarnings({"unchecked"})
    List<ClientSideRequestStatistics.StoreResponseStatistics> statistics =
        (List<ClientSideRequestStatistics.StoreResponseStatistics>) field.get(requestStatistics);
    return statistics;
}
/**
 * Reflectively swaps in an empty list for the private
 * {@code supplementalResponseStatisticsList} field so subsequent recordings
 * start from a clean slate.
 */
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field field = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    field.setAccessible(true);
    field.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
/**
 * Asserts the gateway request timeline contains each expected lifecycle event.
 * (The original asserted "connectionConfigured" twice on consecutive lines;
 * one check suffices.)
 */
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
/**
 * Asserts the direct-mode (RNTBD) request timeline contains every expected
 * lifecycle event name.
 */
private void validateTransportRequestTimelineDirect(String diagnostics) {
    String[] eventNames = {"created", "queued", "pipelined", "transitTime", "received", "completed"};
    for (String eventName : eventNames) {
        assertThat(diagnostics).contains("\"eventName\":\"" + eventName + "\"");
    }
}
/**
 * Fails the test when the given diagnostics string is not well-formed JSON.
 * The parser error message is included so a malformed payload is debuggable
 * from the test report alone.
 */
private void validateJson(String jsonInString) {
    try {
        OBJECT_MAPPER.readTree(jsonInString);
    } catch (JsonProcessingException ex) {
        fail("Diagnostic string is not in json format: " + ex.getMessage());
    }
}
// Simple POJO persisted by the serialization tests. Public fields are mapped
// directly during (de)serialization; "mypk" is the partition-key property used
// by the shared test container.
public static class TestItem {
public String id;
// partition-key value
public String mypk;
// no-arg constructor required for deserialization
public TestItem() {
}
}
} |
/**
 * Verifies that the serialized diagnostics cap supplementalResponseStatisticsList at
 * 10 entries while the in-memory list keeps every recorded response, and that below
 * the cap the serialized and in-memory counts match.
 *
 * NOTE(review): the original first line carried dataset residue ("Added | ") which is
 * not valid Java; it has been stripped. The doubled "requestOperationType" assertion
 * and the redundant second "new ObjectMapper()" were also de-duplicated.
 */
public void supplementalResponseStatisticsList() throws Exception {
    ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
    // Record 15 responses: more than the 10-entry serialization cap.
    for (int i = 0; i < 15; i++) {
        RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
        clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
    }
    List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    ObjectMapper objectMapper = new ObjectMapper();
    String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
    JsonNode jsonNode = objectMapper.readTree(diagnostics);
    ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
    assertThat(storeResponseStatistics.size()).isEqualTo(15);
    assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
    // Reset, then record fewer than the cap: serialized and in-memory counts should match.
    clearStoreResponseStatistics(clientSideRequestStatistics);
    storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    assertThat(storeResponseStatistics.size()).isEqualTo(0);
    for (int i = 0; i < 7; i++) {
        RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
        clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
    }
    storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
    diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
    jsonNode = objectMapper.readTree(diagnostics);
    supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
    assertThat(storeResponseStatistics.size()).isEqualTo(7);
    assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
    for (JsonNode node : supplementalResponseStatisticsListNode) {
        assertThat(node.get("storeResult")).isNotNull();
        assertThat(node.get("requestResponseTimeUTC")).isNotNull();
        assertThat(node.get("requestOperationType")).isNotNull();
    }
}
public void supplementalResponseStatisticsList() throws Exception {
ClientSideRequestStatistics clientSideRequestStatistics = new ClientSideRequestStatistics();
for (int i = 0; i < 15; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
List<ClientSideRequestStatistics.StoreResponseStatistics> storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
ObjectMapper objectMapper = new ObjectMapper();
String diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
JsonNode jsonNode = objectMapper.readTree(diagnostics);
ArrayNode supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(15);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(10);
clearStoreResponseStatistics(clientSideRequestStatistics);
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
assertThat(storeResponseStatistics.size()).isEqualTo(0);
for (int i = 0; i < 7; i++) {
RxDocumentServiceRequest rxDocumentServiceRequest = RxDocumentServiceRequest.create(OperationType.Head, ResourceType.Document);
clientSideRequestStatistics.recordResponse(rxDocumentServiceRequest, null);
}
storeResponseStatistics = getStoreResponseStatistics(clientSideRequestStatistics);
objectMapper = new ObjectMapper();
diagnostics = objectMapper.writeValueAsString(clientSideRequestStatistics);
jsonNode = objectMapper.readTree(diagnostics);
supplementalResponseStatisticsListNode = (ArrayNode) jsonNode.get("supplementalResponseStatisticsList");
assertThat(storeResponseStatistics.size()).isEqualTo(7);
assertThat(supplementalResponseStatisticsListNode.size()).isEqualTo(7);
for(JsonNode node : supplementalResponseStatisticsListNode) {
assertThat(node.get("storeResult").asText()).isNotNull();
String requestResponseTimeUTC = node.get("requestResponseTimeUTC").asText();
String formattedInstant = RESPONSE_TIME_FORMATTER.format(Instant.now());
String[] requestResponseTimeUTCList = requestResponseTimeUTC.split(" ");
String[] formattedInstantList = formattedInstant.split(" ");
assertThat(requestResponseTimeUTC.length()).isEqualTo(formattedInstant.length());
assertThat(requestResponseTimeUTCList.length).isEqualTo(formattedInstantList.length);
assertThat(requestResponseTimeUTCList[0]).isEqualTo(formattedInstantList[0]);
assertThat(requestResponseTimeUTCList[1]).isEqualTo(formattedInstantList[1]);
assertThat(requestResponseTimeUTCList[2]).isEqualTo(formattedInstantList[2]);
assertThat(node.get("requestResponseTimeUTC")).isNotNull();
assertThat(node.get("requestOperationType")).isNotNull();
assertThat(node.get("requestOperationType")).isNotNull();
}
}
class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
// NOTE(review): the source had "@Test(groups = {"simple"})" written twice on consecutive
// lines; @Test is not a repeatable annotation, so the duplicate is a compile error.
// Exactly one annotation is kept.
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
    // Database read -> DATABASE_DESERIALIZATION must be recorded.
    CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
    String diagnostics = cosmosDatabase.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
    // Container read -> CONTAINER_DESERIALIZATION.
    CosmosContainerResponse containerResponse = this.container.read();
    diagnostics = containerResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
    TestItem testItem = new TestItem();
    testItem.id = "TestId";
    testItem.mypk = "TestPk";
    // Create without an explicit partition key -> the SDK must fetch it from the document.
    CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    testItem.id = "TestId2";
    testItem.mypk = "TestPk";
    // Create with an explicit partition key -> no PK-fetch serialization, and no item
    // deserialization has happened yet.
    itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // getItem() is the call that triggers item deserialization; the local is intentionally unused.
    TestItem readTestItem = itemResponse.getItem();
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
    // Same: getItem() on a read response records ITEM_DESERIALIZATION.
    InternalObjectNode properties = readItemResponse.getItem();
    diagnostics = readItemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
private InternalObjectNode getInternalObjectNode() {
InternalObjectNode internalObjectNode = new InternalObjectNode();
internalObjectNode.setId(UUID.randomUUID().toString());
BridgeInternal.setProperty(internalObjectNode, "mypk", "test");
return internalObjectNode;
}
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
@SuppressWarnings({"unchecked"})
List<ClientSideRequestStatistics.StoreResponseStatistics> list
= (List<ClientSideRequestStatistics.StoreResponseStatistics>) storeResponseStatisticsField.get(requestStatistics);
return list;
}
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
Field storeResponseStatisticsField = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
storeResponseStatisticsField.setAccessible(true);
storeResponseStatisticsField.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
/**
 * Asserts the gateway request timeline contains each expected lifecycle event.
 * (The original asserted "connectionConfigured" twice on consecutive lines;
 * one check suffices.)
 */
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
private void validateTransportRequestTimelineDirect(String diagnostics) {
assertThat(diagnostics).contains("\"eventName\":\"created\"");
assertThat(diagnostics).contains("\"eventName\":\"queued\"");
assertThat(diagnostics).contains("\"eventName\":\"pipelined\"");
assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
assertThat(diagnostics).contains("\"eventName\":\"received\"");
assertThat(diagnostics).contains("\"eventName\":\"completed\"");
}
private void validateJson(String jsonInString) {
try {
OBJECT_MAPPER.readTree(jsonInString);
} catch(JsonProcessingException ex) {
fail("Diagnostic string is not in json format");
}
}
public static class TestItem {
public String id;
public String mypk;
public TestItem() {
}
}
}
class CosmosDiagnosticsTest extends TestSuiteBase {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final DateTimeFormatter RESPONSE_TIME_FORMATTER =
DateTimeFormatter.ofPattern("dd MMM yyyy HH:mm:ss" + ".SSS").withLocale(Locale.US).withZone(ZoneOffset.UTC);
private CosmosClient gatewayClient;
private CosmosClient directClient;
private CosmosContainer container;
private CosmosAsyncContainer cosmosAsyncContainer;
@BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT)
public void beforeClass() throws Exception {
assertThat(this.gatewayClient).isNull();
gatewayClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
directClient = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
cosmosAsyncContainer = getSharedMultiPartitionCosmosContainer(this.gatewayClient.asyncClient());
container = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
}
@AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true)
public void afterClass() {
assertThat(this.gatewayClient).isNotNull();
this.gatewayClient.close();
if (this.directClient != null) {
this.directClient.close();
}
}
@Test(groups = {"simple"})
public void gatewayDiagnostics() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"operationType\":\"Create\"");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void gatewayDiagnosticsOnException() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.gatewayMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
container.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"GATEWAY\"");
assertThat(diagnostics).doesNotContain(("\"gatewayStatistics\":null"));
assertThat(diagnostics).contains("\"statusCode\":404");
assertThat(diagnostics).contains("\"operationType\":\"Read\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineGateway(diagnostics);
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
@Test(groups = {"simple"})
public void systemDiagnosticsForSystemStateInformation() {
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = this.container.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("systemInformation");
assertThat(diagnostics).contains("usedMemory");
assertThat(diagnostics).contains("availableMemory");
assertThat(diagnostics).contains("processCpuLoad");
assertThat(diagnostics).contains("systemCpuLoad");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
}
@Test(groups = {"simple"})
public void directDiagnostics() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = cosmosContainer.createItem(internalObjectNode);
String diagnostics = createResponse.getDiagnostics().toString();
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(diagnostics).contains("supplementalResponseStatisticsList");
assertThat(diagnostics).contains("\"gatewayStatistics\":null");
assertThat(diagnostics).contains("addressResolutionStatistics");
assertThat(diagnostics).contains("\"metaDataName\":\"CONTAINER_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"PARTITION_KEY_RANGE_LOOK_UP\"");
assertThat(diagnostics).contains("\"metaDataName\":\"SERVER_ADDRESS_LOOKUP\"");
assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
assertThat(createResponse.getDiagnostics().getDuration()).isNotNull();
validateTransportRequestTimelineDirect(diagnostics);
validateJson(diagnostics);
}
@Test(groups = {"simple"})
public void directDiagnosticsOnException() {
CosmosContainer cosmosContainer = directClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
InternalObjectNode internalObjectNode = getInternalObjectNode();
CosmosItemResponse<InternalObjectNode> createResponse = null;
CosmosClient client = null;
try {
client = new CosmosClientBuilder()
.endpoint(TestConfigurations.HOST)
.key(TestConfigurations.MASTER_KEY)
.contentResponseOnWriteEnabled(true)
.directMode()
.buildClient();
CosmosContainer container = client.getDatabase(cosmosAsyncContainer.getDatabase().getId()).getContainer(cosmosAsyncContainer.getId());
createResponse = container.createItem(internalObjectNode);
CosmosItemRequestOptions cosmosItemRequestOptions = new CosmosItemRequestOptions();
ModelBridgeInternal.setPartitionKey(cosmosItemRequestOptions, new PartitionKey("wrongPartitionKey"));
CosmosItemResponse<InternalObjectNode> readResponse =
cosmosContainer.readItem(BridgeInternal.getProperties(createResponse).getId(),
new PartitionKey("wrongPartitionKey"),
InternalObjectNode.class);
fail("request should fail as partition key is wrong");
} catch (CosmosException exception) {
String diagnostics = exception.getDiagnostics().toString();
assertThat(exception.getStatusCode()).isEqualTo(HttpConstants.StatusCodes.NOTFOUND);
assertThat(diagnostics).contains("\"connectionMode\":\"DIRECT\"");
assertThat(exception.getDiagnostics().getDuration()).isNotNull();
validateJson(diagnostics);
} finally {
if (client != null) {
client.close();
}
}
}
// NOTE(review): the source had "@Test(groups = {"simple"})" written twice on consecutive
// lines; @Test is not a repeatable annotation, so the duplicate is a compile error.
// Exactly one annotation is kept.
@Test(groups = {"simple"})
public void serializationOnVariousScenarios() {
    // Database read -> DATABASE_DESERIALIZATION must be recorded.
    CosmosDatabaseResponse cosmosDatabase = gatewayClient.getDatabase(cosmosAsyncContainer.getDatabase().getId()).read();
    String diagnostics = cosmosDatabase.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"DATABASE_DESERIALIZATION\"");
    // Container read -> CONTAINER_DESERIALIZATION.
    CosmosContainerResponse containerResponse = this.container.read();
    diagnostics = containerResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"CONTAINER_DESERIALIZATION\"");
    TestItem testItem = new TestItem();
    testItem.id = "TestId";
    testItem.mypk = "TestPk";
    // Create without an explicit partition key -> the SDK must fetch it from the document.
    CosmosItemResponse<TestItem> itemResponse = this.container.createItem(testItem);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    testItem.id = "TestId2";
    testItem.mypk = "TestPk";
    // Create with an explicit partition key -> no PK-fetch serialization, and no item
    // deserialization has happened yet.
    itemResponse = this.container.createItem(testItem, new PartitionKey("TestPk"), null);
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"PARTITION_KEY_FETCH_SERIALIZATION\"");
    assertThat(diagnostics).doesNotContain("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    // getItem() is the call that triggers item deserialization; the local is intentionally unused.
    TestItem readTestItem = itemResponse.getItem();
    diagnostics = itemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    CosmosItemResponse<InternalObjectNode> readItemResponse = this.container.readItem(testItem.id, new PartitionKey(testItem.mypk), null, InternalObjectNode.class);
    // Same: getItem() on a read response records ITEM_DESERIALIZATION.
    InternalObjectNode properties = readItemResponse.getItem();
    diagnostics = readItemResponse.getDiagnostics().toString();
    assertThat(diagnostics).contains("\"serializationType\":\"ITEM_DESERIALIZATION\"");
    assertThat(diagnostics).contains("\"userAgent\":\"" + Utils.getUserAgent() + "\"");
}
/**
 * Builds a throwaway document with a random id and a fixed "mypk"
 * partition-key property, for use as test input.
 */
private InternalObjectNode getInternalObjectNode() {
    InternalObjectNode node = new InternalObjectNode();
    node.setId(UUID.randomUUID().toString());
    BridgeInternal.setProperty(node, "mypk", "test");
    return node;
}
/**
 * Reflectively reads the private {@code supplementalResponseStatisticsList}
 * field of the given request statistics (there is no public accessor).
 *
 * @param requestStatistics the statistics instance to inspect.
 * @return the live supplemental store-response statistics list.
 * @throws Exception if reflection fails (field renamed/removed).
 */
private List<ClientSideRequestStatistics.StoreResponseStatistics> getStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field field = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    field.setAccessible(true);
    @SuppressWarnings({"unchecked"})
    List<ClientSideRequestStatistics.StoreResponseStatistics> statistics =
        (List<ClientSideRequestStatistics.StoreResponseStatistics>) field.get(requestStatistics);
    return statistics;
}
/**
 * Reflectively replaces the private {@code supplementalResponseStatisticsList}
 * with a fresh empty (mutable) list, resetting accumulated statistics.
 *
 * @param requestStatistics the statistics instance to reset.
 * @throws Exception if reflection fails (field renamed/removed).
 */
private void clearStoreResponseStatistics(ClientSideRequestStatistics requestStatistics) throws Exception {
    Field field = ClientSideRequestStatistics.class.getDeclaredField("supplementalResponseStatisticsList");
    field.setAccessible(true);
    // A new ArrayList (not an immutable empty list) is installed on purpose:
    // the SDK appends to this list afterwards.
    field.set(requestStatistics, new ArrayList<ClientSideRequestStatistics.StoreResponseStatistics>());
}
/**
 * Asserts that gateway-mode diagnostics contain every expected transport
 * timeline event.
 *
 * FIX: the "connectionConfigured" assertion appeared twice; AssertJ's
 * contains() is not count-sensitive, so the duplicate line added no coverage
 * and has been removed.
 *
 * @param diagnostics the serialized diagnostics JSON string.
 */
private void validateTransportRequestTimelineGateway(String diagnostics) {
    assertThat(diagnostics).contains("\"eventName\":\"connectionConfigured\"");
    assertThat(diagnostics).contains("\"eventName\":\"requestSent\"");
    assertThat(diagnostics).contains("\"eventName\":\"transitTime\"");
    assertThat(diagnostics).contains("\"eventName\":\"received\"");
}
/**
 * Asserts that direct-mode diagnostics contain every expected transport
 * timeline event, from request creation through completion.
 *
 * @param diagnostics the serialized diagnostics JSON string.
 */
private void validateTransportRequestTimelineDirect(String diagnostics) {
    String[] expectedEvents = {"created", "queued", "pipelined", "transitTime", "received", "completed"};
    for (String event : expectedEvents) {
        assertThat(diagnostics).contains("\"eventName\":\"" + event + "\"");
    }
}
/**
 * Fails the test when the given string is not well-formed JSON; a successful
 * parse is discarded, since only parseability matters here.
 *
 * @param jsonInString the candidate diagnostics string.
 */
private void validateJson(String jsonInString) {
    try {
        OBJECT_MAPPER.readTree(jsonInString);
    } catch (JsonProcessingException e) {
        fail("Diagnostic string is not in json format");
    }
}
// Minimal POJO document used as the payload in the serialization tests above.
// Public fields on purpose: the Cosmos serializer maps them directly.
public static class TestItem {
public String id;   // document id
public String mypk; // partition-key value
// Explicit no-arg constructor required for deserialization.
public TestItem() {
}
}
} |
nit: `switch` would be more natural choice here imo. | public void copyIncrementalWithResponseCodeSnippet2() {
final String snapshot = "copy snapshot";
PageBlobCopyIncrementalRequestConditions destinationRequestConditions = new PageBlobCopyIncrementalRequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(new PageBlobCopyIncrementalOptions(url, snapshot)
.setRequestConditions(destinationRequestConditions), timeout, context).getValue();
if (CopyStatusType.SUCCESS == statusType) {
System.out.println("Page blob copied successfully");
} else if (CopyStatusType.FAILED == statusType) {
System.out.println("Page blob copied failed");
} else if (CopyStatusType.ABORTED == statusType) {
System.out.println("Page blob copied aborted");
} else if (CopyStatusType.PENDING == statusType) {
System.out.println("Page blob copied pending");
}
} | if (CopyStatusType.SUCCESS == statusType) { | public void copyIncrementalWithResponseCodeSnippet2() {
final String snapshot = "copy snapshot";
PageBlobCopyIncrementalRequestConditions destinationRequestConditions = new PageBlobCopyIncrementalRequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(new PageBlobCopyIncrementalOptions(url, snapshot)
.setRequestConditions(destinationRequestConditions), timeout, context).getValue();
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
default:
break;
}
} | class PageBlobClientJavaDocCodeSnippets {
private PageBlobClient client = new SpecializedBlobClientBuilder().buildPageBlobClient();
private Map<String, String> metadata = Collections.singletonMap("metadata", "value");
private Map<String, String> tags = Collections.singletonMap("tag", "value");
private String leaseId = "leaseId";
private Duration timeout = Duration.ofSeconds(30);
private long size = 1024;
private long sequenceNumber = 0;
private long sourceOffset = 0;
private long offset = 0;
private String key = "key";
private String value = "value";
private String data = "data";
private String url = "https:
/**
* Code snippets for {@link PageBlobClient
*/
public void createCodeSnippet() {
PageBlobItem pageBlob = client.create(size);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithOverwrite() {
boolean overwrite = false;
PageBlobItem pageBlob = client.create(size, overwrite);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithResponseCodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(size, sequenceNumber, headers, metadata, blobRequestConditions, timeout, context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithResponse2CodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(new PageBlobCreateOptions(size).setSequenceNumber(sequenceNumber)
.setHeaders(headers).setMetadata(metadata).setTags(tags)
.setRequestConditions(blobRequestConditions), timeout,
context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void uploadPagesCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
PageBlobItem pageBlob = client.uploadPages(pageRange, dataStream);
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, Duration, Context)}
*
* @throws NoSuchAlgorithmException If Md5 calculation fails
*/
public void uploadPagesWithResponseCodeSnippet() throws NoSuchAlgorithmException {
byte[] md5 = MessageDigest.getInstance("MD5").digest("data".getBytes(StandardCharsets.UTF_8));
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesWithResponse(pageRange, dataStream, md5, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void uploadPagesFromURLCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobItem pageBlob = client.uploadPagesFromUrl(pageRange, url, sourceOffset);
System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, BlobRequestConditions, Duration, Context)}
*/
public void uploadPagesFromUrlWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
byte[] sourceContentMD5 = new byte[512];
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
BlobRequestConditions sourceRequestConditions = new BlobRequestConditions()
.setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesFromUrlWithResponse(pageRange, url, sourceOffset, sourceContentMD5, pageBlobRequestConditions,
sourceRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void clearPagesCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobItem pageBlob = client.clearPages(pageRange);
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
public void clearPagesWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.clearPagesWithResponse(pageRange, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
PageList pageList = client.getPageRanges(blobRange);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
public void getPageRangesWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesWithResponse(blobRange, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesDiffCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
PageList pageList = client.getPageRangesDiff(blobRange, prevSnapshot);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
public void getPageRangesDiffWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshot, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesDiffFromUrlCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
PageList pageList = client.getPageRangesDiff(blobRange, prevSnapshotUrl);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
public void getPageRangesDiffFromUrlWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshotUrl, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void resizeCodeSnippet() {
PageBlobItem pageBlob = client.resize(size);
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void resizeWithResponseCodeSnippet() {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.resizeWithResponse(size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void updateSequenceNumberCodeSnippet() {
PageBlobItem pageBlob = client.updateSequenceNumber(SequenceNumberActionType.INCREMENT, size);
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* BlobRequestConditions, Duration, Context)}
*/
public void updateSequenceNumberWithResponseCodeSnippet() {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client.updateSequenceNumberWithResponse(
SequenceNumberActionType.INCREMENT, size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
/**
 * Code snippet for starting an incremental copy and reporting its status.
 *
 * Improvement (per review nit): the if/else-if chain over the
 * {@code CopyStatusType} enum is replaced with a {@code switch}, which reads
 * more naturally and makes exhaustiveness easier to audit.
 */
public void copyIncrementalCodeSnippet() {
    final String snapshot = "copy snapshot";
    CopyStatusType statusType = client.copyIncremental(url, snapshot);
    switch (statusType) {
        case SUCCESS:
            System.out.println("Page blob copied successfully");
            break;
        case FAILED:
            System.out.println("Page blob copied failed");
            break;
        case ABORTED:
            System.out.println("Page blob copied aborted");
            break;
        case PENDING:
            System.out.println("Page blob copied pending");
            break;
        default:
            break;
    }
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
/**
 * Code snippet for an incremental copy with request conditions, reporting the
 * resulting status.
 *
 * Improvement (per review nit): the if/else-if chain over the
 * {@code CopyStatusType} enum is replaced with a {@code switch}, matching the
 * other copy-status snippets.
 */
public void copyIncrementalWithResponseCodeSnippet() {
    final String snapshot = "copy snapshot";
    // Only proceed when the destination ETag does not match.
    RequestConditions modifiedRequestConditions = new RequestConditions()
        .setIfNoneMatch("snapshotMatch");
    Context context = new Context(key, value);
    CopyStatusType statusType = client
        .copyIncrementalWithResponse(url, snapshot, modifiedRequestConditions, timeout, context).getValue();
    switch (statusType) {
        case SUCCESS:
            System.out.println("Page blob copied successfully");
            break;
        case FAILED:
            System.out.println("Page blob copied failed");
            break;
        case ABORTED:
            System.out.println("Page blob copied aborted");
            break;
        case PENDING:
            System.out.println("Page blob copied pending");
            break;
        default:
            break;
    }
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
} | class PageBlobClientJavaDocCodeSnippets {
private PageBlobClient client = new SpecializedBlobClientBuilder().buildPageBlobClient();
private Map<String, String> metadata = Collections.singletonMap("metadata", "value");
private Map<String, String> tags = Collections.singletonMap("tag", "value");
private String leaseId = "leaseId";
private Duration timeout = Duration.ofSeconds(30);
private long size = 1024;
private long sequenceNumber = 0;
private long sourceOffset = 0;
private long offset = 0;
private String key = "key";
private String value = "value";
private String data = "data";
private String url = "https:
/**
* Code snippets for {@link PageBlobClient
*/
public void createCodeSnippet() {
PageBlobItem pageBlob = client.create(size);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithOverwrite() {
boolean overwrite = false;
PageBlobItem pageBlob = client.create(size, overwrite);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithResponseCodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(size, sequenceNumber, headers, metadata, blobRequestConditions, timeout, context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithResponse2CodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(new PageBlobCreateOptions(size).setSequenceNumber(sequenceNumber)
.setHeaders(headers).setMetadata(metadata).setTags(tags)
.setRequestConditions(blobRequestConditions), timeout,
context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void uploadPagesCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
PageBlobItem pageBlob = client.uploadPages(pageRange, dataStream);
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, Duration, Context)}
*
* @throws NoSuchAlgorithmException If Md5 calculation fails
*/
public void uploadPagesWithResponseCodeSnippet() throws NoSuchAlgorithmException {
byte[] md5 = MessageDigest.getInstance("MD5").digest("data".getBytes(StandardCharsets.UTF_8));
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesWithResponse(pageRange, dataStream, md5, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void uploadPagesFromURLCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobItem pageBlob = client.uploadPagesFromUrl(pageRange, url, sourceOffset);
System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, BlobRequestConditions, Duration, Context)}
*/
public void uploadPagesFromUrlWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
byte[] sourceContentMD5 = new byte[512];
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
BlobRequestConditions sourceRequestConditions = new BlobRequestConditions()
.setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesFromUrlWithResponse(pageRange, url, sourceOffset, sourceContentMD5, pageBlobRequestConditions,
sourceRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void clearPagesCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobItem pageBlob = client.clearPages(pageRange);
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
public void clearPagesWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.clearPagesWithResponse(pageRange, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
PageList pageList = client.getPageRanges(blobRange);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
public void getPageRangesWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesWithResponse(blobRange, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesDiffCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
PageList pageList = client.getPageRangesDiff(blobRange, prevSnapshot);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
public void getPageRangesDiffWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshot, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void getPageRangesDiffFromUrlCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
PageList pageList = client.getPageRangesDiff(blobRange, prevSnapshotUrl);
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
public void getPageRangesDiffFromUrlWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshotUrl, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
public void resizeCodeSnippet() {
PageBlobItem pageBlob = client.resize(size);
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void resizeWithResponseCodeSnippet() {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.resizeWithResponse(size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void updateSequenceNumberCodeSnippet() {
PageBlobItem pageBlob = client.updateSequenceNumber(SequenceNumberActionType.INCREMENT, size);
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* BlobRequestConditions, Duration, Context)}
*/
public void updateSequenceNumberWithResponseCodeSnippet() {
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client.updateSequenceNumberWithResponse(
SequenceNumberActionType.INCREMENT, size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void copyIncrementalCodeSnippet() {
final String snapshot = "copy snapshot";
CopyStatusType statusType = client.copyIncremental(url, snapshot);
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
default:
break;
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
public void copyIncrementalWithResponseCodeSnippet() {
final String snapshot = "copy snapshot";
RequestConditions modifiedRequestConditions = new RequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(url, snapshot, modifiedRequestConditions, timeout, context).getValue();
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
default:
break;
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
} |
yeah I can change that. This was just copy pasted from the original snippet | public void copyIncrementalWithResponseCodeSnippet2() {
final String snapshot = "copy snapshot";
PageBlobCopyIncrementalRequestConditions destinationRequestConditions = new PageBlobCopyIncrementalRequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(new PageBlobCopyIncrementalOptions(url, snapshot)
.setRequestConditions(destinationRequestConditions), timeout, context).getValue();
if (CopyStatusType.SUCCESS == statusType) {
System.out.println("Page blob copied successfully");
} else if (CopyStatusType.FAILED == statusType) {
System.out.println("Page blob copied failed");
} else if (CopyStatusType.ABORTED == statusType) {
System.out.println("Page blob copied aborted");
} else if (CopyStatusType.PENDING == statusType) {
System.out.println("Page blob copied pending");
}
} | if (CopyStatusType.SUCCESS == statusType) { | public void copyIncrementalWithResponseCodeSnippet2() {
final String snapshot = "copy snapshot";
PageBlobCopyIncrementalRequestConditions destinationRequestConditions = new PageBlobCopyIncrementalRequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(new PageBlobCopyIncrementalOptions(url, snapshot)
.setRequestConditions(destinationRequestConditions), timeout, context).getValue();
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
default:
break;
}
} | class PageBlobClientJavaDocCodeSnippets {
private PageBlobClient client = new SpecializedBlobClientBuilder().buildPageBlobClient();
private Map<String, String> metadata = Collections.singletonMap("metadata", "value");
private Map<String, String> tags = Collections.singletonMap("tag", "value");
private String leaseId = "leaseId";
private Duration timeout = Duration.ofSeconds(30);
private long size = 1024;
private long sequenceNumber = 0;
private long sourceOffset = 0;
private long offset = 0;
private String key = "key";
private String value = "value";
private String data = "data";
private String url = "https:
/**
* Code snippets for {@link PageBlobClient
*/
public void createCodeSnippet() {
PageBlobItem pageBlob = client.create(size);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithOverwrite() {
boolean overwrite = false;
PageBlobItem pageBlob = client.create(size, overwrite);
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
public void createWithResponseCodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(size, sequenceNumber, headers, metadata, blobRequestConditions, timeout, context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: create a page blob using the options-bag overload (PageBlobCreateOptions),
// which additionally carries tags alongside headers/metadata/conditions.
public void createWithResponse2CodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
// The create only succeeds while this lease is held on the destination blob.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(new PageBlobCreateOptions(size).setSequenceNumber(sequenceNumber)
.setHeaders(headers).setMetadata(metadata).setTags(tags)
.setRequestConditions(blobRequestConditions), timeout,
context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: upload one 512-byte page range from an in-memory stream.
public void uploadPagesCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final InputStream source = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    final PageBlobItem uploaded = client.uploadPages(range, source);
    System.out.printf("Uploaded page blob with sequence number %s%n", uploaded.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, Duration, Context)}
*
* @throws NoSuchAlgorithmException If Md5 calculation fails
*/
public void uploadPagesWithResponseCodeSnippet() throws NoSuchAlgorithmException {
// Transactional MD5 of the payload; the service validates the upload against it.
byte[] md5 = MessageDigest.getInstance("MD5").digest("data".getBytes(StandardCharsets.UTF_8));
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
// The upload only succeeds while this lease is held.
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesWithResponse(pageRange, dataStream, md5, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: fill one 512-byte page range by copying from a source URL.
public void uploadPagesFromURLCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final PageBlobItem uploaded = client.uploadPagesFromUrl(range, url, sourceOffset);
    System.out.printf("Uploaded page blob from URL with sequence number %s%n", uploaded.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, BlobRequestConditions, Duration, Context)}
*/
// Snippet: upload pages from a source URL with source-content MD5 validation and
// request conditions on both destination (lease) and source (unmodified-since).
// Fix: removed the unused local `dataStream` — this overload copies from `url`,
// so the in-memory stream was dead code.
public void uploadPagesFromUrlWithResponseCodeSnippet() {
    PageRange pageRange = new PageRange()
        .setStart(0)
        .setEnd(511);
    byte[] sourceContentMD5 = new byte[512];
    // Destination condition: operation only succeeds while this lease is held.
    PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
    // Source condition: fail if the source blob changed in the last 3 days.
    BlobRequestConditions sourceRequestConditions = new BlobRequestConditions()
        .setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));
    Context context = new Context(key, value);
    PageBlobItem pageBlob = client
        .uploadPagesFromUrlWithResponse(pageRange, url, sourceOffset, sourceContentMD5, pageBlobRequestConditions,
            sourceRequestConditions, timeout, context).getValue();
    System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: clear (zero out) one 512-byte page range.
public void clearPagesCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final PageBlobItem cleared = client.clearPages(range);
    System.out.printf("Cleared page blob with sequence number %s%n", cleared.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
// Snippet: clear a page range under a lease condition, with timeout and Context.
public void clearPagesWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
// The clear only succeeds while this lease is held.
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.clearPagesWithResponse(pageRange, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: enumerate the valid (written) page ranges of the blob.
public void getPageRangesCodeSnippet() {
    final PageList ranges = client.getPageRanges(new BlobRange(offset));
    System.out.println("Valid Page Ranges are:");
    ranges.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
// Snippet: enumerate valid page ranges under a lease condition, with timeout/Context.
public void getPageRangesWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesWithResponse(blobRange, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: list the page ranges that changed since a previous snapshot.
public void getPageRangesDiffCodeSnippet() {
    final String prevSnapshot = "previous snapshot";
    final PageList changed = client.getPageRangesDiff(new BlobRange(offset), prevSnapshot);
    System.out.println("Valid Page Ranges are:");
    changed.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: diff page ranges against a previous snapshot, under a lease condition.
public void getPageRangesDiffWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshot, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: list the page ranges that changed relative to a previous snapshot URL.
public void getPageRangesDiffFromUrlCodeSnippet() {
    final String prevSnapshotUrl = "previous snapshot url";
    final PageList changed = client.getPageRangesDiff(new BlobRange(offset), prevSnapshotUrl);
    System.out.println("Valid Page Ranges are:");
    changed.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: diff page ranges against a previous snapshot URL, under a lease condition.
public void getPageRangesDiffFromUrlWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshotUrl, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: resize the page blob to a new total size.
public void resizeCodeSnippet() {
    final PageBlobItem resized = client.resize(size);
    System.out.printf("Page blob resized with sequence number %s%n", resized.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: resize the page blob under a lease condition, with timeout and Context.
public void resizeWithResponseCodeSnippet() {
// The resize only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.resizeWithResponse(size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: increment the blob's sequence number.
public void updateSequenceNumberCodeSnippet() {
    final PageBlobItem updated = client.updateSequenceNumber(SequenceNumberActionType.INCREMENT, size);
    System.out.printf("Page blob updated to sequence number %s%n", updated.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* BlobRequestConditions, Duration, Context)}
*/
// Snippet: increment the sequence number under a lease condition, with timeout/Context.
public void updateSequenceNumberWithResponseCodeSnippet() {
// The update only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client.updateSequenceNumberWithResponse(
SequenceNumberActionType.INCREMENT, size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: start an incremental snapshot copy and report the returned status.
// A status outside the four known values (or null) prints nothing, exactly as before.
public void copyIncrementalCodeSnippet() {
    final String snapshot = "copy snapshot";
    final CopyStatusType statusType = client.copyIncremental(url, snapshot);
    String message = null;
    if (statusType == CopyStatusType.SUCCESS) {
        message = "Page blob copied successfully";
    } else if (statusType == CopyStatusType.FAILED) {
        message = "Page blob copied failed";
    } else if (statusType == CopyStatusType.ABORTED) {
        message = "Page blob copied aborted";
    } else if (statusType == CopyStatusType.PENDING) {
        message = "Page blob copied pending";
    }
    if (message != null) {
        System.out.println(message);
    }
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: start an incremental copy with an If-None-Match condition on the
// destination's last snapshot, then report the returned copy status.
// A status outside the four known values (or null) prints nothing, exactly as before.
public void copyIncrementalWithResponseCodeSnippet() {
    final String snapshot = "copy snapshot";
    final RequestConditions modifiedRequestConditions = new RequestConditions()
        .setIfNoneMatch("snapshotMatch");
    final Context context = new Context(key, value);
    final CopyStatusType statusType = client
        .copyIncrementalWithResponse(url, snapshot, modifiedRequestConditions, timeout, context).getValue();
    String message = null;
    if (statusType == CopyStatusType.SUCCESS) {
        message = "Page blob copied successfully";
    } else if (statusType == CopyStatusType.FAILED) {
        message = "Page blob copied failed";
    } else if (statusType == CopyStatusType.ABORTED) {
        message = "Page blob copied aborted";
    } else if (statusType == CopyStatusType.PENDING) {
        message = "Page blob copied pending";
    }
    if (message != null) {
        System.out.println(message);
    }
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
} | class PageBlobClientJavaDocCodeSnippets {
private PageBlobClient client = new SpecializedBlobClientBuilder().buildPageBlobClient();
private Map<String, String> metadata = Collections.singletonMap("metadata", "value");
private Map<String, String> tags = Collections.singletonMap("tag", "value");
private String leaseId = "leaseId";
private Duration timeout = Duration.ofSeconds(30);
private long size = 1024;
private long sequenceNumber = 0;
private long sourceOffset = 0;
private long offset = 0;
private String key = "key";
private String value = "value";
private String data = "data";
private String url = "https:
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: create a fixed-size page blob and print its starting sequence number.
public void createCodeSnippet() {
    final PageBlobItem created = client.create(size);
    System.out.printf("Created page blob with sequence number %s%n", created.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: create a page blob, explicitly refusing to overwrite an existing blob.
public void createWithOverwrite() {
    final PageBlobItem created = client.create(size, /* overwrite */ false);
    System.out.printf("Created page blob with sequence number %s%n", created.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: create a page blob while supplying HTTP headers, metadata, a lease-based
// request condition, a timeout, and a caller-provided pipeline Context.
public void createWithResponseCodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
// The create only succeeds while this lease is held on the destination blob.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
// getValue() unwraps the typed payload from the full HTTP response.
PageBlobItem pageBlob = client
.createWithResponse(size, sequenceNumber, headers, metadata, blobRequestConditions, timeout, context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: create a page blob using the options-bag overload (PageBlobCreateOptions),
// which additionally carries tags alongside headers/metadata/conditions.
public void createWithResponse2CodeSnippet() {
BlobHttpHeaders headers = new BlobHttpHeaders()
.setContentLanguage("en-US")
.setContentType("binary");
// The create only succeeds while this lease is held on the destination blob.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.createWithResponse(new PageBlobCreateOptions(size).setSequenceNumber(sequenceNumber)
.setHeaders(headers).setMetadata(metadata).setTags(tags)
.setRequestConditions(blobRequestConditions), timeout,
context)
.getValue();
System.out.printf("Created page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: upload one 512-byte page range from an in-memory stream.
public void uploadPagesCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final InputStream source = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    final PageBlobItem uploaded = client.uploadPages(range, source);
    System.out.printf("Uploaded page blob with sequence number %s%n", uploaded.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, Duration, Context)}
*
* @throws NoSuchAlgorithmException If Md5 calculation fails
*/
public void uploadPagesWithResponseCodeSnippet() throws NoSuchAlgorithmException {
// Transactional MD5 of the payload; the service validates the upload against it.
byte[] md5 = MessageDigest.getInstance("MD5").digest("data".getBytes(StandardCharsets.UTF_8));
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
InputStream dataStream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
// The upload only succeeds while this lease is held.
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.uploadPagesWithResponse(pageRange, dataStream, md5, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Uploaded page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: fill one 512-byte page range by copying from a source URL.
public void uploadPagesFromURLCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final PageBlobItem uploaded = client.uploadPagesFromUrl(range, url, sourceOffset);
    System.out.printf("Uploaded page blob from URL with sequence number %s%n", uploaded.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* PageBlobRequestConditions, BlobRequestConditions, Duration, Context)}
*/
// Snippet: upload pages from a source URL with source-content MD5 validation and
// request conditions on both destination (lease) and source (unmodified-since).
// Fix: removed the unused local `dataStream` — this overload copies from `url`,
// so the in-memory stream was dead code.
public void uploadPagesFromUrlWithResponseCodeSnippet() {
    PageRange pageRange = new PageRange()
        .setStart(0)
        .setEnd(511);
    byte[] sourceContentMD5 = new byte[512];
    // Destination condition: operation only succeeds while this lease is held.
    PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
    // Source condition: fail if the source blob changed in the last 3 days.
    BlobRequestConditions sourceRequestConditions = new BlobRequestConditions()
        .setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));
    Context context = new Context(key, value);
    PageBlobItem pageBlob = client
        .uploadPagesFromUrlWithResponse(pageRange, url, sourceOffset, sourceContentMD5, pageBlobRequestConditions,
            sourceRequestConditions, timeout, context).getValue();
    System.out.printf("Uploaded page blob from URL with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: clear (zero out) one 512-byte page range.
public void clearPagesCodeSnippet() {
    final PageRange range = new PageRange().setStart(0).setEnd(511);
    final PageBlobItem cleared = client.clearPages(range);
    System.out.printf("Cleared page blob with sequence number %s%n", cleared.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
// Snippet: clear a page range under a lease condition, with timeout and Context.
public void clearPagesWithResponseCodeSnippet() {
PageRange pageRange = new PageRange()
.setStart(0)
.setEnd(511);
// The clear only succeeds while this lease is held.
PageBlobRequestConditions pageBlobRequestConditions = new PageBlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.clearPagesWithResponse(pageRange, pageBlobRequestConditions, timeout, context).getValue();
System.out.printf("Cleared page blob with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: enumerate the valid (written) page ranges of the blob.
public void getPageRangesCodeSnippet() {
    final PageList ranges = client.getPageRanges(new BlobRange(offset));
    System.out.println("Valid Page Ranges are:");
    ranges.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Context)}
*/
// Snippet: enumerate valid page ranges under a lease condition, with timeout/Context.
public void getPageRangesWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesWithResponse(blobRange, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: list the page ranges that changed since a previous snapshot.
public void getPageRangesDiffCodeSnippet() {
    final String prevSnapshot = "previous snapshot";
    final PageList changed = client.getPageRangesDiff(new BlobRange(offset), prevSnapshot);
    System.out.println("Valid Page Ranges are:");
    changed.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: diff page ranges against a previous snapshot, under a lease condition.
public void getPageRangesDiffWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshot = "previous snapshot";
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshot, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: list the page ranges that changed relative to a previous snapshot URL.
public void getPageRangesDiffFromUrlCodeSnippet() {
    final String prevSnapshotUrl = "previous snapshot url";
    final PageList changed = client.getPageRangesDiff(new BlobRange(offset), prevSnapshotUrl);
    System.out.println("Valid Page Ranges are:");
    changed.getPageRange().forEach(range ->
        System.out.printf("Start: %s, End: %s%n", range.getStart(), range.getEnd()));
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: diff page ranges against a previous snapshot URL, under a lease condition.
public void getPageRangesDiffFromUrlWithResponseCodeSnippet() {
BlobRange blobRange = new BlobRange(offset);
final String prevSnapshotUrl = "previous snapshot url";
// The read only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageList pageList = client
.getPageRangesDiffWithResponse(blobRange, prevSnapshotUrl, blobRequestConditions, timeout, context).getValue();
System.out.println("Valid Page Ranges are:");
for (PageRange pageRange : pageList.getPageRange()) {
System.out.printf("Start: %s, End: %s%n", pageRange.getStart(), pageRange.getEnd());
}
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: resize the page blob to a new total size.
public void resizeCodeSnippet() {
    final PageBlobItem resized = client.resize(size);
    System.out.printf("Page blob resized with sequence number %s%n", resized.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: resize the page blob under a lease condition, with timeout and Context.
public void resizeWithResponseCodeSnippet() {
// The resize only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client
.resizeWithResponse(size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob resized with sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: increment the blob's sequence number.
public void updateSequenceNumberCodeSnippet() {
    final PageBlobItem updated = client.updateSequenceNumber(SequenceNumberActionType.INCREMENT, size);
    System.out.printf("Page blob updated to sequence number %s%n", updated.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
* BlobRequestConditions, Duration, Context)}
*/
// Snippet: increment the sequence number under a lease condition, with timeout/Context.
public void updateSequenceNumberWithResponseCodeSnippet() {
// The update only succeeds while this lease is held.
BlobRequestConditions blobRequestConditions = new BlobRequestConditions().setLeaseId(leaseId);
Context context = new Context(key, value);
PageBlobItem pageBlob = client.updateSequenceNumberWithResponse(
SequenceNumberActionType.INCREMENT, size, blobRequestConditions, timeout, context).getValue();
System.out.printf("Page blob updated to sequence number %s%n", pageBlob.getBlobSequenceNumber());
}
/**
* Code snippets for {@link PageBlobClient
*/
// Snippet: start an incremental snapshot copy and report the returned status.
// NOTE(review): switching on the enum NPEs if copyIncremental ever returns null —
// presumably it never does here; confirm against the client's contract.
public void copyIncrementalCodeSnippet() {
final String snapshot = "copy snapshot";
CopyStatusType statusType = client.copyIncremental(url, snapshot);
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
// Any other status is silently ignored.
default:
break;
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
// Snippet: start an incremental copy with an If-None-Match condition on the
// destination's last snapshot, then report the returned copy status.
// NOTE(review): switching on the enum NPEs if the response value is ever null —
// presumably it never is here; confirm against the client's contract.
public void copyIncrementalWithResponseCodeSnippet() {
final String snapshot = "copy snapshot";
RequestConditions modifiedRequestConditions = new RequestConditions()
.setIfNoneMatch("snapshotMatch");
Context context = new Context(key, value);
CopyStatusType statusType = client
.copyIncrementalWithResponse(url, snapshot, modifiedRequestConditions, timeout, context).getValue();
switch (statusType) {
case SUCCESS:
System.out.println("Page blob copied successfully");
break;
case FAILED:
System.out.println("Page blob copied failed");
break;
case ABORTED:
System.out.println("Page blob copied aborted");
break;
case PENDING:
System.out.println("Page blob copied pending");
break;
// Any other status is silently ignored.
default:
break;
}
}
/**
* Code snippets for {@link PageBlobClient
* Duration, Context)}
*/
} |
nit: no need for extra variable creation here. | Mono<PiiEntityCollection> recognizePiiEntities(String document, String language) {
try {
Objects.requireNonNull(document, "'document' cannot be null.");
final TextDocumentInput textDocumentInput = new TextDocumentInput("0", document).setLanguage(language);
return recognizePiiEntitiesBatch(Collections.singletonList(textDocumentInput), null)
.map(resultCollectionResponse -> {
PiiEntityCollection entityCollection = null;
for (RecognizePiiEntitiesResult entitiesResult : resultCollectionResponse.getValue()) {
if (entitiesResult.isError()) {
throw logger.logExceptionAsError(toTextAnalyticsException(entitiesResult.getError()));
}
entityCollection = new PiiEntityCollection(entitiesResult.getEntities(),
entitiesResult.getEntities().getWarnings());
}
return entityCollection;
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
} | final TextDocumentInput textDocumentInput = new TextDocumentInput("0", document).setLanguage(language); | Mono<PiiEntityCollection> recognizePiiEntities(String document, String language) {
try {
Objects.requireNonNull(document, "'document' cannot be null.");
return recognizePiiEntitiesBatch(
Collections.singletonList(new TextDocumentInput("0", document).setLanguage(language)), null)
.map(resultCollectionResponse -> {
PiiEntityCollection entityCollection = null;
for (RecognizePiiEntitiesResult entitiesResult : resultCollectionResponse.getValue()) {
if (entitiesResult.isError()) {
throw logger.logExceptionAsError(toTextAnalyticsException(entitiesResult.getError()));
}
entityCollection = new PiiEntityCollection(entitiesResult.getEntities(),
entitiesResult.getEntities().getWarnings());
}
return entityCollection;
});
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
} | class RecognizePiiEntityAsyncClient {
private final ClientLogger logger = new ClientLogger(RecognizePiiEntityAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create a {@link RecognizePiiEntityAsyncClient} that sends requests to the Text Analytics services's
* recognize Personally Identifiable Information entity endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
RecognizePiiEntityAsyncClient(TextAnalyticsClientImpl service) {
// Store the generated proxy; all REST calls go through it.
this.service = service;
}
/**
* Helper function for calling service with max overloaded parameters that returns a {@link Mono}
* which contains {@link PiiEntityCollection}.
*
* @param document A single document.
* @param language The language code.
*
* @return The {@link Mono} of {@link PiiEntityCollection}.
*/
/**
* Helper function for calling service with max overloaded parameters.
*
* @param documents The list of documents to recognize Personally Identifiable Information entities for.
* @param options The {@link TextAnalyticsRequestOptions} request options.
*
* @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatch(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {
try {
// Validate eagerly so a bad batch fails before any network call.
inputDocumentsValidation(documents);
// withContext captures the subscriber's reactor Context for the pipeline.
return withContext(context -> getRecognizePiiEntitiesResponse(documents, options, context));
} catch (RuntimeException ex) {
// Surface synchronous validation failures as an error Mono, not a throw.
return monoError(logger, ex);
}
}
/**
* Helper function for calling service with max overloaded parameters with {@link Context} is given.
*
* @param documents The list of documents to recognize Personally Identifiable Information entities for.
* @param options The {@link TextAnalyticsRequestOptions} request options.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWithContext(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
try {
// Validate eagerly so a bad batch fails before any network call.
inputDocumentsValidation(documents);
// Unlike the no-Context overload, the caller supplies the pipeline Context.
return getRecognizePiiEntitiesResponse(documents, options, context);
} catch (RuntimeException ex) {
// Surface synchronous validation failures as an error Mono, not a throw.
return monoError(logger, ex);
}
}
/**
* Helper method to convert the service response of {@link EntitiesResult} to {@link Response} which contains
* {@link RecognizePiiEntitiesResultCollection}.
*
* @param response the {@link Response} of {@link EntitiesResult} returned by the service.
*
* @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
// Maps the raw service EntitiesResult into the public result-collection model,
// folding both per-document successes and per-document errors into one list.
private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse(
final Response<EntitiesResult> response) {
final EntitiesResult entitiesResult = response.getValue();
final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
// Successful documents: convert each raw entity and warning to the public types.
entitiesResult.getDocuments().forEach(documentEntities -> {
// NOTE(review): argument order here is (offset, length, confidenceScore); a
// sibling copy of this method passes (confidenceScore, offset, length) —
// verify against the PiiEntity constructor signature.
final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity ->
new PiiEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()),
entity.getSubcategory(), entity.getOffset(), entity.getLength(),
entity.getConfidenceScore()))
.collect(Collectors.toList());
final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream()
.map(warning -> {
// Warning code may be absent; fromString(null) keeps that explicit.
final WarningCodeValue warningCodeValue = warning.getCode();
return new TextAnalyticsWarning(
WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()),
warning.getMessage());
}).collect(Collectors.toList());
recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(
documentEntities.getId(),
documentEntities.getStatistics() == null ? null
: toTextDocumentStatistics(documentEntities.getStatistics()),
null,
new PiiEntityCollection(new IterableStream<>(piiEntities), new IterableStream<>(warnings))
));
});
// Failed documents: carry the error, with null statistics and entities.
entitiesResult.getErrors().forEach(documentError -> {
recognizeEntitiesResults.add(
new RecognizePiiEntitiesResult(documentError.getId(), null,
toTextAnalyticsError(documentError.getError()), null));
});
return new SimpleResponse<>(response,
new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, entitiesResult.getModelVersion(),
entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics())));
}
/**
* Call the service with REST response, convert to a {@link Mono} of {@link Response} that contains
* {@link RecognizePiiEntitiesResultCollection} from a {@link SimpleResponse} of {@link EntitiesResult}.
*
* @param documents The list of documents to recognize Personally Identifiable Information entities for.
* @param options The {@link TextAnalyticsRequestOptions} request options.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
/*
 * Calls the PII recognition REST endpoint and converts the raw EntitiesResult
 * response into the public RecognizePiiEntitiesResultCollection.
 * Fix: dropped the redundant eager `documents.toString()` in the parameterized
 * log call — SLF4J-style loggers stringify arguments lazily, so the explicit
 * toString() built the string even when the log level was disabled.
 */
private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEntitiesResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    return service.entitiesRecognitionPiiWithResponseAsync(
        new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
        // Options are optional; forward nulls so the service applies its defaults.
        options == null ? null : options.getModelVersion(),
        options == null ? null : options.isIncludeStatistics(),
        null,
        // Tag the context so distributed tracing attributes the span to Cognitive Services.
        context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
        .doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents))
        .doOnSuccess(response ->
            logger.info("Recognized Personally Identifiable Information entities for a batch of documents- {}",
                response.getValue()))
        .doOnError(error ->
            logger.warning("Failed to recognize Personally Identifiable Information entities - {}", error))
        .map(this::toRecognizePiiEntitiesResultCollectionResponse)
        .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
}
} | class RecognizePiiEntityAsyncClient {
private final ClientLogger logger = new ClientLogger(RecognizePiiEntityAsyncClient.class);
private final TextAnalyticsClientImpl service;
/**
* Create a {@link RecognizePiiEntityAsyncClient} that sends requests to the Text Analytics services's
* recognize Personally Identifiable Information entity endpoint.
*
* @param service The proxy service used to perform REST calls.
*/
RecognizePiiEntityAsyncClient(TextAnalyticsClientImpl service) {
// Store the generated proxy; all REST calls go through it.
this.service = service;
}
/**
* Helper function for calling service with max overloaded parameters that returns a {@link Mono}
* which contains {@link PiiEntityCollection}.
*
* @param document A single document.
* @param language The language code.
*
* @return The {@link Mono} of {@link PiiEntityCollection}.
*/
/**
* Helper function for calling service with max overloaded parameters.
*
* @param documents The list of documents to recognize Personally Identifiable Information entities for.
* @param options The {@link TextAnalyticsRequestOptions} request options.
*
* @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatch(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options) {
try {
// Validate eagerly so a bad batch fails before any network call.
inputDocumentsValidation(documents);
// withContext captures the subscriber's reactor Context for the pipeline.
return withContext(context -> getRecognizePiiEntitiesResponse(documents, options, context));
} catch (RuntimeException ex) {
// Surface synchronous validation failures as an error Mono, not a throw.
return monoError(logger, ex);
}
}
/**
* Helper function for calling service with max overloaded parameters with {@link Context} is given.
*
* @param documents The list of documents to recognize Personally Identifiable Information entities for.
* @param options The {@link TextAnalyticsRequestOptions} request options.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWithContext(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
try {
// Validate eagerly so a bad batch fails before any network call.
inputDocumentsValidation(documents);
// Unlike the no-Context overload, the caller supplies the pipeline Context.
return getRecognizePiiEntitiesResponse(documents, options, context);
} catch (RuntimeException ex) {
// Surface synchronous validation failures as an error Mono, not a throw.
return monoError(logger, ex);
}
}
/**
 * Helper method to convert the service response of {@link EntitiesResult} to a {@link Response}
 * that contains a {@link RecognizePiiEntitiesResultCollection}.
 *
 * @param response the {@link Response} of {@link EntitiesResult} returned by the service.
 *
 * @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
 */
private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse(
    final Response<EntitiesResult> response) {
    final EntitiesResult serviceResult = response.getValue();
    final List<RecognizePiiEntitiesResult> piiEntitiesResults = new ArrayList<>();
    // Successfully processed documents: map wire-format entities/warnings to the public models.
    serviceResult.getDocuments().forEach(document -> {
        final List<PiiEntity> piiEntities = document.getEntities().stream()
            .map(piiEntity -> new PiiEntity(piiEntity.getText(),
                EntityCategory.fromString(piiEntity.getCategory()), piiEntity.getSubcategory(),
                piiEntity.getConfidenceScore(), piiEntity.getOffset(), piiEntity.getLength()))
            .collect(Collectors.toList());
        // The warning code can be absent; guard the null before converting it to a string.
        final List<TextAnalyticsWarning> warnings = document.getWarnings().stream()
            .map(warning -> {
                final WarningCodeValue codeValue = warning.getCode();
                return new TextAnalyticsWarning(
                    WarningCode.fromString(codeValue == null ? null : codeValue.toString()),
                    warning.getMessage());
            })
            .collect(Collectors.toList());
        piiEntitiesResults.add(new RecognizePiiEntitiesResult(
            document.getId(),
            document.getStatistics() == null
                ? null : toTextDocumentStatistics(document.getStatistics()),
            null,
            new PiiEntityCollection(new IterableStream<>(piiEntities), new IterableStream<>(warnings))));
    });
    // Documents the service rejected carry only an id and an error, no entity payload.
    serviceResult.getErrors().forEach(documentError -> piiEntitiesResults.add(
        new RecognizePiiEntitiesResult(documentError.getId(), null,
            toTextAnalyticsError(documentError.getError()), null)));
    return new SimpleResponse<>(response,
        new RecognizePiiEntitiesResultCollection(piiEntitiesResults, serviceResult.getModelVersion(),
            serviceResult.getStatistics() == null
                ? null : toBatchStatistics(serviceResult.getStatistics())));
}
/**
 * Calls the service with the REST response and converts it to a {@link Mono} of {@link Response}
 * that contains {@link RecognizePiiEntitiesResultCollection} from a {@link SimpleResponse} of
 * {@link EntitiesResult}.
 *
 * @param documents The list of documents to recognize Personally Identifiable Information entities for.
 * @param options The {@link TextAnalyticsRequestOptions} request options.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A mono {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
 */
private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEntitiesResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Tag the pipeline context with the Cognitive Services tracing namespace.
    final Context tracingContext =
        context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE);
    return service.entitiesRecognitionPiiWithResponseAsync(
        new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
        options == null ? null : options.getModelVersion(),
        options == null ? null : options.isIncludeStatistics(),
        null,
        tracingContext)
        .doOnSubscribe(ignored -> logger.info(
            "Start recognizing Personally Identifiable Information entities for a batch of documents."))
        .doOnSuccess(ignored -> logger.info(
            "Successfully recognized Personally Identifiable Information entities for a batch of documents."))
        .doOnError(throwable ->
            logger.warning("Failed to recognize Personally Identifiable Information entities - {}", throwable))
        .map(this::toRecognizePiiEntitiesResultCollectionResponse)
        .onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
}
} |
Do we need this check here too? | public PiiEntityCollection recognizePiiEntities(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.recognizePiiEntities(document, language).block();
} | Objects.requireNonNull(document, "'document' cannot be null."); | public PiiEntityCollection recognizePiiEntities(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.recognizePiiEntities(document, language).block();
} | class TextAnalyticsClient {
private final TextAnalyticsAsyncClient client;
/**
 * Creates a {@code TextAnalyticsClient} that sends requests to the Text Analytics service's endpoint.
 * Each service call goes through the pipeline configured by the {@link TextAnalyticsClientBuilder}.
 *
 * @param client The {@link TextAnalyticsAsyncClient} that this client routes its requests through.
 */
TextAnalyticsClient(TextAnalyticsAsyncClient client) {
    this.client = client;
}
/**
* Get default country hint code.
*
* @return The default country hint code
*/
public String getDefaultCountryHint() {
    // Delegates to the wrapped async client's builder-configured default.
    return client.getDefaultCountryHint();
}
/**
* Get default language when the builder is setup.
*
* @return The default language
*/
public String getDefaultLanguage() {
    // Delegates to the wrapped async client's builder-configured default.
    return client.getDefaultLanguage();
}
/**
* Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
* certainty that the identified language is true.
*
* This method will use the default country hint that sets up in
* {@link TextAnalyticsClientBuilder
* the country hint.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language of single document.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return The {@link DetectedLanguage detected language} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document) {
    // Overload that applies the builder-configured default country hint.
    return detectLanguage(document, client.getDefaultCountryHint());
}
/**
* Returns the detected language and a confidence score between zero and one.
* Scores close to one indicate 100% certainty that the identified language is true.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language of documents with a provided country hint.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
* specified. To remove this behavior you can reset this parameter by setting this value to empty string
* {@code countryHint} = "" or "none".
*
* @return The {@link DetectedLanguage detected language} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document, String countryHint) {
    // Enforce the NullPointerException documented above up front, consistent with the other
    // single-document methods (recognizeEntities, analyzeSentiment, ...), so callers get a
    // clear message before any service interaction.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Synchronous facade: blocks on the async client's result.
    return client.detectLanguage(document, countryHint).block();
}
/**
* Detects Language for a batch of document with the provided country hint and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language in a list of documents with a provided country hint and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch
*
* @param documents The list of documents to detect languages for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
* specified. To remove this behavior you can reset this parameter by setting this value to empty string
* {@code countryHint} = "" or "none".
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link DetectLanguageResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectLanguageResultCollection detectLanguageBatch(
    Iterable<String> documents, String countryHint, TextAnalyticsRequestOptions options) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Synchronous facade: blocks on the async client's result.
    return client.detectLanguageBatch(documents, countryHint, options).block();
}
/**
* Detects Language for a batch of {@link DetectLanguageInput document} with provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the languages with http response in a list of {@link DetectLanguageInput document} with provided
* request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch
*
* @param documents The list of {@link DetectLanguageInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link DetectLanguageResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DetectLanguageResultCollection> detectLanguageBatchWithResponse(
    Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Uses the context-aware async path so the caller-supplied Context flows through the pipeline.
    return client.detectLanguageAsyncClient.detectLanguageBatchWithContext(documents, options, context).block();
}
/**
* Returns a list of general categorized entities in the provided document.
*
* For a list of supported entity types, check: <a href="https:
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the entities of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities
*
* @param document The document to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link CategorizedEntityCollection} contains a list of
* {@link CategorizedEntity recognized categorized entities} and warnings.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document) {
    // Overload that applies the builder-configured default language.
    return recognizeEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of general categorized entities in the provided document with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities in a document with a provided language code.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities
*
* @param document The document to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link CategorizedEntityCollection} contains a list of
* {@link CategorizedEntity recognized categorized entities} and warnings.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document, String language) {
    // Fail fast with a clear message, per the documented NullPointerException.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Synchronous facade: blocks on the async client's result.
    return client.recognizeEntities(document, language).block();
}
/**
* Returns a list of general categorized entities for the provided list of documents with provided language code
* and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities in a list of documents with a provided language code and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntitiesBatch
*
* @param documents A list of documents to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link RecognizeEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeEntitiesResultCollection recognizeEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Synchronous facade: blocks on the async client's result.
    return client.recognizeEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of general categorized entities for the provided list of {@link TextDocumentInput document} with
* provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities with http response in a list of {@link TextDocumentInput document} with provided
* request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Uses the context-aware async path so the caller-supplied Context flows through the pipeline.
    return client.recognizeEntityAsyncClient.recognizeEntitiesBatchWithContext(documents, options, context).block();
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
* default language that sets up in {@link TextAnalyticsClientBuilder
* specified, service will use 'en' as the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the PII entities details in a document.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PiiEntityCollection recognizePiiEntities(String document) {
    // Overload that applies the builder-configured default language.
    return recognizePiiEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document
* with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a document with a provided language code.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of documents with
* provided language code and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a list of documents with a provided language code
* and request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch
*
* @param documents A list of documents to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizePiiEntitiesResultCollection recognizePiiEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Validate eagerly, for parity with the other *Batch overloads (detectLanguageBatch,
    // recognizeEntitiesBatch, ...): the javadoc promises IllegalArgumentException on an
    // empty batch, so reject it here before reaching the service.
    inputDocumentsValidation(documents);
    // Synchronous facade: blocks on the async client's result.
    return client.recognizePiiEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of
* {@link TextDocumentInput document} with provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details with http response in a list of {@link TextDocumentInput document}
* with provided request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizePiiEntitiesResultCollection> recognizePiiEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Validate eagerly, for parity with the sibling *BatchWithResponse methods: the javadoc
    // promises IllegalArgumentException on an empty batch, so reject it before the service call.
    inputDocumentsValidation(documents);
    // Uses the context-aware async path so the caller-supplied Context flows through the pipeline.
    return client.recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithContext(documents, options,
        context).block();
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the provided document.
* See <a href="https:
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the linked entities of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities
*
* @param document The document to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document) {
    // Overload that applies the builder-configured default language.
    return recognizeLinkedEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the provided document with
* language code.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the linked entities in a document with a provided language code.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities
*
* @param document The document to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document, String language) {
    // Fail fast with a clear message, per the documented NullPointerException.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Synchronous facade: blocks on the async client's result.
    return client.recognizeLinkedEntities(document, language).block();
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the list of documents with
* provided language code and request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the linked entities in a list of documents with a provided language code and request options.
* </p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch
*
* @param documents A list of documents to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
* English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link RecognizeLinkedEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeLinkedEntitiesResultCollection recognizeLinkedEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Synchronous facade: blocks on the async client's result.
    return client.recognizeLinkedEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the list of
* {@link TextDocumentInput document} and request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the linked entities with http response in a list of {@link TextDocumentInput} with request options.
* </p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection>
    recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> documents,
        TextAnalyticsRequestOptions options, Context context) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Uses the context-aware async path so the caller-supplied Context flows through the pipeline.
    return client.recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithContext(
        documents, options, context).block();
}
/**
* Returns a list of strings denoting the key phrases in the document.
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document) {
    // Overload that applies the builder-configured default language.
    return extractKeyPhrases(document, client.getDefaultLanguage());
}
/**
* Returns a list of strings denoting the key phrases in the document.
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases in a document with a provided language representation.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document, String language) {
    // Fail fast with a clear message, per the documented NullPointerException.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Synchronous facade: blocks on the async client's result.
    return client.extractKeyPhrases(document, language).block();
}
/**
* Returns a list of strings denoting the key phrases in the documents with provided language code and
* request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases in a list of documents with a provided language code and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch
*
* @param documents A list of documents to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
* English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link ExtractKeyPhrasesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public ExtractKeyPhrasesResultCollection extractKeyPhrasesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Synchronous facade: blocks on the async client's result.
    return client.extractKeyPhrasesBatch(documents, language, options).block();
}
/**
* Returns a list of strings denoting the key phrases in the a batch of {@link TextDocumentInput document} with
* request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases with http response in a list of {@link TextDocumentInput} with request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link ExtractKeyPhrasesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtractKeyPhrasesResultCollection> extractKeyPhrasesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Rejects a null/empty batch before any service call.
    inputDocumentsValidation(documents);
    // Uses the context-aware async path so the caller-supplied Context flows through the pipeline.
    return client.extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithContext(documents, options, context)
        .block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within i
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link DocumentSentiment analyzed document sentiment} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document) {
    // Overload that applies the builder-configured default language.
    return analyzeSentiment(document, client.getDefaultLanguage());
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within i
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments in a document with a provided language representation.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link DocumentSentiment analyzed document sentiment} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language) {
    // Fail fast with a clear message, per the documented NullPointerException.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Synchronous facade: blocks on the async client's result.
    return client.analyzeSentiment(document, language).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments in a list of documents with a provided language representation and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Fail fast on null/empty input before issuing the service call.
    inputDocumentsValidation(documents);
    return client.analyzeSentimentBatch(documents, language, options).block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments with http response in a list of {@link TextDocumentInput documents} with request
 * options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Goes through the async client's context-aware path so the caller's Context reaches the pipeline.
    return client.analyzeSentimentAsyncClient.analyzeSentimentBatchWithContext(documents, options, context).block();
}
} | class TextAnalyticsClient {
// The async client this synchronous client delegates to; every public method blocks on its Mono.
private final TextAnalyticsAsyncClient client;
/**
 * Create a {@code TextAnalyticsClient client} that sends requests to the Text Analytics service's endpoint.
 * Each service call goes through the {@link TextAnalyticsClientBuilder#pipeline http pipeline}.
 *
 * @param client The {@link TextAnalyticsAsyncClient} that the client routes its requests through.
 */
TextAnalyticsClient(TextAnalyticsAsyncClient client) {
    this.client = client;
}
/**
 * Gets the default country hint code configured on the underlying async client.
 *
 * @return The default country hint code.
 */
public String getDefaultCountryHint() {
    return client.getDefaultCountryHint();
}
/**
 * Gets the default language configured when the builder was set up.
 *
 * @return The default language.
 */
public String getDefaultLanguage() {
    return client.getDefaultLanguage();
}
/**
 * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
 * certainty that the identified language is true.
 *
 * This method uses the default country hint set up in
 * {@link TextAnalyticsClientBuilder#defaultCountryHint(String)}. If none is specified, the service uses "US" as
 * the country hint.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Detects the language of single document.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage#String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return The {@link DetectedLanguage detected language} of the document.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document) {
    // Delegates to the two-argument overload with the client-wide default country hint.
    return detectLanguage(document, client.getDefaultCountryHint());
}
/**
 * Returns the detected language and a confidence score between zero and one.
 * Scores close to one indicate 100% certainty that the identified language is true.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Detects the language of documents with a provided country hint.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage#String-String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
 * specified. To remove this behavior you can reset this parameter by setting this value to empty string
 * {@code countryHint} = "" or "none".
 *
 * @return The {@link DetectedLanguage detected language} of the document.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document, String countryHint) {
    return client.detectLanguage(document, countryHint).block();
}
/**
 * Detects Language for a batch of documents with the provided country hint and request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Detects the language in a list of documents with a provided country hint and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents The list of documents to detect languages for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
 * specified. To remove this behavior you can reset this parameter by setting this value to empty string
 * {@code countryHint} = "" or "none".
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link DetectLanguageResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectLanguageResultCollection detectLanguageBatch(
    Iterable<String> documents, String countryHint, TextAnalyticsRequestOptions options) {
    // Fail fast on null/empty input before issuing the service call.
    inputDocumentsValidation(documents);
    return client.detectLanguageBatch(documents, countryHint, options).block();
}
/**
 * Detects Language for a batch of {@link DetectLanguageInput document} with provided request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Detects the languages with http response in a list of {@link DetectLanguageInput document} with provided
 * request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents The list of {@link DetectLanguageInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link DetectLanguageResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DetectLanguageResultCollection> detectLanguageBatchWithResponse(
    Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Context-aware path so the caller's Context reaches the pipeline.
    return client.detectLanguageAsyncClient.detectLanguageBatchWithContext(documents, options, context).block();
}
/**
 * Returns a list of general categorized entities in the provided document.
 *
 * For a list of supported entity types, check: <a href="https://aka.ms/taner">supported entity types</a>.
 *
 * This method uses the default language that can be set by using method
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses "en" as
 * the language.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognize the entities of documents</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities#String}
 *
 * @param document The document to recognize entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return A {@link CategorizedEntityCollection} contains a list of
 * {@link CategorizedEntity recognized categorized entities} and warnings.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document) {
    // Delegates to the two-argument overload with the client-wide default language.
    return recognizeEntities(document, client.getDefaultLanguage());
}
/**
 * Returns a list of general categorized entities in the provided document with provided language code.
 *
 * For a list of supported entity types, check: <a href="https://aka.ms/taner">supported entity types</a>.
 * For a list of enabled languages, check: <a href="https://aka.ms/talangs">supported languages</a>.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the entities in a document with a provided language code.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities#String-String}
 *
 * @param document The document to recognize entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
 *
 * @return The {@link CategorizedEntityCollection} contains a list of
 * {@link CategorizedEntity recognized categorized entities} and warnings.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.recognizeEntities(document, language).block();
}
/**
 * Returns a list of general categorized entities for the provided list of documents with provided language code
 * and request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the entities in a list of documents with a provided language code and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntitiesBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to recognize entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link RecognizeEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeEntitiesResultCollection recognizeEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Fail fast on null/empty input before issuing the service call.
    inputDocumentsValidation(documents);
    return client.recognizeEntitiesBatch(documents, language, options).block();
}
/**
 * Returns a list of general categorized entities for the provided list of {@link TextDocumentInput document} with
 * provided request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the entities with http response in a list of {@link TextDocumentInput document} with provided
 * request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeEntitiesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Context-aware path so the caller's Context reaches the pipeline.
    return client.recognizeEntityAsyncClient.recognizeEntitiesBatchWithContext(documents, options, context).block();
}
/**
 * Returns a list of Personally Identifiable Information(PII) entities in the provided document.
 *
 * For a list of supported entity types, check: <a href="https://aka.ms/tanerpii">supported PII entity types</a>.
 * For a list of enabled languages, check: <a href="https://aka.ms/talangs">supported languages</a>. This method uses the
 * default language that is set using {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is
 * specified, service will use 'en' as the language.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognize the PII entities details in a document.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities#String}
 *
 * @param document The document to recognize PII entities details for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return A {@link PiiEntityCollection recognized PII entities collection}.
 *
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PiiEntityCollection recognizePiiEntities(String document) {
    // Delegates to the two-argument overload with the client-wide default language.
    return recognizePiiEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document
* with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a document with a provided language code.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
 * Returns a list of Personally Identifiable Information(PII) entities for the provided list of documents with
 * provided language code and request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the PII entities details in a list of documents with a provided language code
 * and request options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to recognize PII entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link RecognizePiiEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizePiiEntitiesResultCollection recognizePiiEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Validate eagerly, consistent with every other *Batch method in this class, so the documented
    // NullPointerException/IllegalArgumentException are actually thrown before the service call.
    inputDocumentsValidation(documents);
    return client.recognizePiiEntitiesBatch(documents, language, options).block();
}
/**
 * Returns a list of Personally Identifiable Information(PII) entities for the provided list of
 * {@link TextDocumentInput document} with provided request options.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the PII entities details with http response in a list of {@link TextDocumentInput document}
 * with provided request options.</p>
 *
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize PII entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link RecognizePiiEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizePiiEntitiesResultCollection> recognizePiiEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Validate eagerly, consistent with the other *BatchWithResponse methods in this class.
    inputDocumentsValidation(documents);
    return client.recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithContext(documents, options,
        context).block();
}
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the provided document.
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * This method uses the default language that can be set by using method
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses "en" as
 * the language.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognize the linked entities of documents</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities#String}
 *
 * @param document The document to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document) {
    // Delegates to the two-argument overload with the client-wide default language.
    return recognizeLinkedEntities(document, client.getDefaultLanguage());
}
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the provided document with
 * language code.
 *
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the linked entities in a document with a provided language code.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities#String-String}
 *
 * @param document The document to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.recognizeLinkedEntities(document, language).block();
}
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the list of documents with
 * provided language code and request options.
 *
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the linked entities in a list of documents with a provided language code and request options.
 * </p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link RecognizeLinkedEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeLinkedEntitiesResultCollection recognizeLinkedEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Fail fast on null/empty input before issuing the service call.
    inputDocumentsValidation(documents);
    return client.recognizeLinkedEntitiesBatch(documents, language, options).block();
}
/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the list of
 * {@link TextDocumentInput document} and request options.
 *
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Recognizes the linked entities with http response in a list of {@link TextDocumentInput} with request options.
 * </p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection>
    recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> documents,
        TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Context-aware path so the caller's Context reaches the pipeline.
    return client.recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithContext(
        documents, options, context).block();
}
/**
 * Returns a list of strings denoting the key phrases in the document.
 *
 * This method uses the default language that can be set by using method
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses "en" as
 * the language.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases of documents</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases#String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document) {
    // Delegates to the two-argument overload with the client-wide default language.
    return extractKeyPhrases(document, client.getDefaultLanguage());
}
/**
 * Returns a list of strings denoting the key phrases in the document.
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases in a document with a provided language representation.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases#String-String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.extractKeyPhrases(document, language).block();
}
/**
 * Returns a list of strings denoting the key phrases in the documents with provided language code and
 * request options.
 *
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases in a list of documents with a provided language code and request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch#Iterable-String-TextAnalyticsRequestOptions}
 *
 * @param documents A list of documents to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link ExtractKeyPhrasesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public ExtractKeyPhrasesResultCollection extractKeyPhrasesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Fail fast on null/empty input before issuing the service call.
    inputDocumentsValidation(documents);
    return client.extractKeyPhrasesBatch(documents, language, options).block();
}
/**
 * Returns a list of strings denoting the key phrases in the a batch of {@link TextDocumentInput document} with
 * request options.
 *
 * See <a href="https://aka.ms/talangs">supported languages</a> for supported languages in Text Analytics API.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Extracts key phrases with http response in a list of {@link TextDocumentInput} with request options.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch#Iterable-TextAnalyticsRequestOptions-Context}
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 *
 * @return A {@link Response} that contains a {@link ExtractKeyPhrasesResultCollection}.
 *
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtractKeyPhrasesResultCollection> extractKeyPhrasesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Context-aware path so the caller's Context reaches the pipeline.
    return client.extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithContext(documents, options, context)
        .block();
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * This method uses the default language that can be set by using method
 * {@link TextAnalyticsClientBuilder#defaultLanguage(String)}. If none is specified, the service uses "en" as
 * the language.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments of documents</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment#String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 *
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document) {
    // Delegates to the two-argument overload with the client-wide default language.
    return analyzeSentiment(document, client.getDefaultLanguage());
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p><strong>Code Sample</strong></p>
 * <p>Analyze the sentiments in a document with a provided language representation.</p>
 * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment#String-String}
 *
 * @param document The document to be analyzed.
 * For text length limits, maximum batch size, and supported text encoding, see
 * <a href="https://aka.ms/azsdk/textanalytics/data-limits">data limits</a>.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 *
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.analyzeSentiment(document, language).block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments in a list of documents with a provided language representation and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch
*
* @param documents A list of documents to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
* English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject a null/empty batch up front.
    inputDocumentsValidation(documents);
    // Hand off to the async client; block converts the reactive result to a plain return.
    return client
        .analyzeSentimentBatch(documents, language, options)
        .block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments with http response in a list of {@link TextDocumentInput documents} with request
* options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch#Iterable-TextAnalyticsRequestOptions-Context}
*
* @param documents A list of {@link TextDocumentInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https://aka.ms/textanalytics-data-limits">data limits</a>.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Reject a null/empty batch up front.
    inputDocumentsValidation(documents);
    // Go through the async client's internal helper so the caller-supplied Context is threaded
    // through the HTTP pipeline; block for the synchronous response.
    return client.analyzeSentimentAsyncClient
        .analyzeSentimentBatchWithContext(documents, options, context)
        .block();
}
} |
make sure to keep this concise as this gets added in between the java docs and could make the docs really verbose! | public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 555-55-5555.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, offset: %s, length: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getOffset(),
entity.getLength(), entity.getConfidenceScore())));
});
} | + " entity subcategory: %s, offset: %s, length: %s, confidence score: %f.%n", | public void recognizePiiEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"My SSN is 859-98-0987.",
"Visa card 0111 1111 1111 1111."
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true)
.setModelVersion("latest");
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(documents, "en", requestOptions)
.subscribe(piiEntitiesResults -> {
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
});
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
// Shared async client used by every code-snippet method in this class.
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
// "{key}" and "{endpoint}" are placeholders to be replaced by the caller.
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
// update() swaps the key on the same credential instance the client was built with.
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
// Prints the detected language's name, ISO 639-1 code, and confidence score.
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
// "US" is passed as a country hint alongside the document.
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
// Batch detection over plain strings; country hint "US", null for the request options.
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
// setIncludeStatistics(true) makes per-batch statistics available on the result collection.
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
// Prints each recognized entity's text, category, and confidence score.
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
// Same call as recognizeEntities, with the document language explicitly set to "en".
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
// Batch recognition over plain strings (language "en", null options); prints batch statistics
// followed by every entity.
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
// The WithResponse overload also exposes the HTTP status code.
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String)}
*/
public void recognizePiiEntities() {
// Sample SSN aligned with the other PII snippets in this file (859-98-0987); output kept
// concise because this snippet is embedded between javadocs.
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document).subscribe(piiEntityCollection ->
piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String, String)}
*/
public void recognizePiiEntitiesWithLanguage() {
// Sample SSN aligned with the other PII snippets in this file; language set to "en".
// Printed output kept concise (no offset/length) because this snippet is embedded in javadocs.
String document = "My SSN is 859-98-0987";
textAnalyticsAsyncClient.recognizePiiEntities(document, "en")
.subscribe(piiEntityCollection -> piiEntityCollection.forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(), entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
* Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatchWithResponse(Iterable,
* TextAnalyticsRequestOptions)}
*/
public void recognizeBatchPiiEntitiesMaxOverload() {
// Sample SSN aligned with the other PII snippets in this file; printed output kept concise
// (no offset/length) because this snippet is embedded between javadocs.
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "My SSN is 859-98-0987."),
new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
RecognizePiiEntitiesResultCollection piiEntitiesResults = response.getValue();
TextDocumentBatchStatistics batchStatistics = piiEntitiesResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
piiEntitiesResults.forEach(recognizePiiEntitiesResult ->
recognizePiiEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized Personally Identifiable Information entity: %s, entity category: %s,"
+ " entity subcategory: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getSubcategory(),
entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntities() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
// Prints each linked entity, then every match with its confidence score.
textAnalyticsAsyncClient.recognizeLinkedEntities(document).subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesWithLanguage() {
String document = "Old Faithful is a geyser at Yellowstone Park.";
// Same call as recognizeLinkedEntities, with the language explicitly set to "en".
textAnalyticsAsyncClient.recognizeLinkedEntities(document, "en").subscribe(
linkedEntityCollection -> linkedEntityCollection.forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeLinkedEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Old Faithful is a geyser at Yellowstone Park.",
"Mount Shasta has lenticular clouds."
);
// Batch linked-entity recognition over plain strings (language "en", null options).
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchLinkedEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
// The WithResponse overload also exposes the HTTP status code.
textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeLinkedEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeLinkedEntitiesResult ->
recognizeLinkedEntitiesResult.getEntities().forEach(linkedEntity -> {
System.out.println("Linked Entities:");
System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(),
linkedEntity.getDataSource());
linkedEntity.getMatches().forEach(entityMatch -> System.out.printf(
"Matched entity: %s, confidence score: %.2f.%n",
entityMatch.getText(), entityMatch.getConfidenceScore()));
}));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrases() {
System.out.println("Extracted phrases:");
// Each emitted item is a single extracted key phrase string.
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase ->
System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesWithLanguage() {
System.out.println("Extracted phrases:");
// Extracts key phrases with the document language explicitly set to "fr".
textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr")
.subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractKeyPhrasesStringListWithOptions() {
List<String> documents = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
// Batch key-phrase extraction; prints batch statistics, then each document's phrases.
textAnalyticsAsyncClient.extractKeyPhrasesBatch(documents, "en", null).subscribe(
extractKeyPhraseResults -> {
TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
extractKeyPhraseResults.forEach(extractKeyPhraseResult -> {
System.out.println("Extracted phrases:");
extractKeyPhraseResult.getKeyPhrases().forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void extractBatchKeyPhrasesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
// The WithResponse overload also exposes the HTTP status code.
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
ExtractKeyPhrasesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (ExtractKeyPhraseResult extractKeyPhraseResult : resultCollection) {
System.out.println("Extracted phrases:");
for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) {
System.out.printf("%s.%n", keyPhrase);
}
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentiment() {
String document = "The hotel was dark and unclean.";
// Prints document-level sentiment, then each sentence's sentiment with confidence scores.
textAnalyticsAsyncClient.analyzeSentiment(document).subscribe(documentSentiment -> {
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf(
"Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentWithLanguage() {
String document = "The hotel was dark and unclean.";
// Same analysis with the document language explicitly set to "en".
textAnalyticsAsyncClient.analyzeSentiment(document, "en")
.subscribe(documentSentiment -> {
System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) {
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
+ "negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeSentimentStringListWithOptions() {
List<String> documents = Arrays.asList(
"The hotel was dark and unclean.",
"The restaurant had amazing gnocchi."
);
// Batch sentiment over plain strings; prints statistics, then per-document and
// per-sentence results.
textAnalyticsAsyncClient.analyzeSentimentBatch(documents, "en", null).subscribe(
response -> {
TextDocumentBatchStatistics batchStatistics = response.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
response.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void analyzeBatchSentimentMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
// The WithResponse overload also exposes the HTTP status code and batch statistics.
textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
AnalyzeSentimentResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(),
batchStatistics.getValidDocumentCount());
resultCollection.forEach(analyzeSentimentResult -> {
System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId());
DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment();
System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
documentSentiment.getSentences().forEach(sentenceSentiment ->
System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, "
+ "neutral score: %.2f, negative score: %.2f.%n",
sentenceSentiment.getSentiment(),
sentenceSentiment.getConfidenceScores().getPositive(),
sentenceSentiment.getConfidenceScores().getNeutral(),
sentenceSentiment.getConfidenceScores().getNegative()));
});
});
}
} | class TextAnalyticsAsyncClientJavaDocCodeSnippets {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient();
/**
* Code snippet for creating a {@link TextAnalyticsAsyncClient}
*
* @return The TextAnalyticsAsyncClient object
*/
public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() {
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("{endpoint}")
.buildAsyncClient();
return textAnalyticsAsyncClient;
}
/**
* Code snippet for updating the existing API key.
*/
public void rotateAzureKeyCredential() {
AzureKeyCredential credential = new AzureKeyCredential("{key}");
TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder()
.credential(credential)
.endpoint("{endpoint}")
.buildAsyncClient();
credential.update("{new_api_key}");
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguage() {
String document = "Bonjour tout le monde";
textAnalyticsAsyncClient.detectLanguage(document).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageWithCountryHint() {
String document = "This text is in English";
String countryHint = "US";
textAnalyticsAsyncClient.detectLanguage(document, countryHint).subscribe(detectedLanguage ->
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore()));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectLanguageStringListWithOptions() {
List<String> documents = Arrays.asList(
"This is written in English",
"Este es un documento escrito en Español."
);
textAnalyticsAsyncClient.detectLanguageBatch(documents, "US", null).subscribe(
batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : batchResult) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void detectBatchLanguagesMaxOverload() {
List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList(
new DetectLanguageInput("1", "This is written in English.", "US"),
new DetectLanguageInput("2", "Este es un documento escrito en Español.", "ES")
);
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
DetectLanguageResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
for (DetectLanguageResult detectLanguageResult : resultCollection) {
DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage();
System.out.printf("Detected language name: %s, ISO 6391 Name: %s, confidence score: %f.%n",
detectedLanguage.getName(), detectedLanguage.getIso6391Name(),
detectedLanguage.getConfidenceScore());
}
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntities() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document)
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesWithLanguage() {
String document = "Satya Nadella is the CEO of Microsoft";
textAnalyticsAsyncClient.recognizeEntities(document, "en")
.subscribe(entityCollection -> entityCollection.forEach(entity ->
System.out.printf("Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeEntitiesStringListWithOptions() {
List<String> documents = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
textAnalyticsAsyncClient.recognizeEntitiesBatch(documents, "en", null)
.subscribe(batchResult -> {
TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
batchResult.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(), entity.getCategory(), entity.getConfidenceScore())));
});
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
public void recognizeBatchEntitiesMaxOverload() {
List<TextDocumentInput> textDocumentInputs1 = Arrays.asList(
new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions)
.subscribe(response -> {
System.out.printf("Status code of request response: %d%n", response.getStatusCode());
RecognizeEntitiesResultCollection resultCollection = response.getValue();
TextDocumentBatchStatistics batchStatistics = resultCollection.getStatistics();
System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount());
resultCollection.forEach(recognizeEntitiesResult ->
recognizeEntitiesResult.getEntities().forEach(entity -> System.out.printf(
"Recognized categorized entity: %s, category: %s, confidence score: %f.%n",
entity.getText(),
entity.getCategory(),
entity.getConfidenceScore())));
});
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String)}.
 */
public void recognizePiiEntities() {
    // Single document containing PII (a social security number).
    String input = "My SSN is 859-98-0987";
    textAnalyticsAsyncClient.recognizePiiEntities(input).subscribe(collection ->
        collection.forEach(piiEntity -> {
            System.out.printf(
                "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                    + " entity subcategory: %s, confidence score: %f.%n",
                piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubcategory(),
                piiEntity.getConfidenceScore());
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntities(String, String)}.
 */
public void recognizePiiEntitiesWithLanguage() {
    // Same PII document, this time with an explicit language hint.
    String input = "My SSN is 859-98-0987";
    textAnalyticsAsyncClient.recognizePiiEntities(input, "en").subscribe(collection ->
        collection.forEach(piiEntity -> {
            System.out.printf(
                "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                    + " entity subcategory: %s, confidence score: %f.%n",
                piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubcategory(),
                piiEntity.getConfidenceScore());
        }));
}
/**
* Code snippet for {@link TextAnalyticsAsyncClient
*/
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizePiiEntitiesBatchWithResponse(Iterable,
 * TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchPiiEntitiesMaxOverload() {
    // Batch of documents, each holding a different kind of PII.
    List<TextDocumentInput> piiDocuments = Arrays.asList(
        new TextDocumentInput("0", "My SSN is 859-98-0987."),
        new TextDocumentInput("1", "Visa card 0111 1111 1111 1111."));
    // Request batch-level statistics together with the entities.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(piiDocuments, options)
        .subscribe(response -> {
            RecognizePiiEntitiesResultCollection results = response.getValue();
            TextDocumentBatchStatistics stats = results.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            results.forEach(result -> result.getEntities().forEach(piiEntity -> {
                System.out.printf(
                    "Recognized Personally Identifiable Information entity: %s, entity category: %s,"
                        + " entity subcategory: %s, confidence score: %f.%n",
                    piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubcategory(),
                    piiEntity.getConfidenceScore());
            }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String)}.
 */
public void recognizeLinkedEntities() {
    // Document containing an entity with a well-known knowledge-base link.
    String input = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(input).subscribe(collection ->
        collection.forEach(entity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntities(String, String)}.
 */
public void recognizeLinkedEntitiesWithLanguage() {
    // Same document, this time with an explicit language hint.
    String input = "Old Faithful is a geyser at Yellowstone Park.";
    textAnalyticsAsyncClient.recognizeLinkedEntities(input, "en").subscribe(collection ->
        collection.forEach(entity -> {
            System.out.println("Linked Entities:");
            System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                entity.getDataSource());
            entity.getMatches().forEach(match -> System.out.printf(
                "Matched entity: %s, confidence score: %f.%n",
                match.getText(), match.getConfidenceScore()));
        }));
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatch(Iterable, String,
 * TextAnalyticsRequestOptions)}.
 */
public void recognizeLinkedEntitiesStringListWithOptions() {
    // Plain-string documents; the language hint applies to the whole batch.
    List<String> inputs = Arrays.asList(
        "Old Faithful is a geyser at Yellowstone Park.",
        "Mount Shasta has lenticular clouds."
    );
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(inputs, "en", null)
        .subscribe(results -> {
            TextDocumentBatchStatistics stats = results.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            results.forEach(result -> result.getEntities().forEach(entity -> {
                System.out.println("Linked Entities:");
                System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                    entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                    entity.getDataSource());
                entity.getMatches().forEach(match -> System.out.printf(
                    "Matched entity: %s, confidence score: %f.%n",
                    match.getText(), match.getConfidenceScore()));
            }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#recognizeLinkedEntitiesBatchWithResponse(Iterable,
 * TextAnalyticsRequestOptions)}.
 */
public void recognizeBatchLinkedEntitiesMaxOverload() {
    // Pre-tagged documents sent through the with-response overload.
    List<TextDocumentInput> batchDocuments = Arrays.asList(
        new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park.").setLanguage("en"),
        new TextDocumentInput("1", "Mount Shasta has lenticular clouds.").setLanguage("en"));
    // Request batch-level statistics together with the linked entities.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(batchDocuments, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            RecognizeLinkedEntitiesResultCollection collection = response.getValue();
            TextDocumentBatchStatistics stats = collection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            collection.forEach(result -> result.getEntities().forEach(entity -> {
                System.out.println("Linked Entities:");
                System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n",
                    entity.getName(), entity.getDataSourceEntityId(), entity.getUrl(),
                    entity.getDataSource());
                entity.getMatches().forEach(match -> System.out.printf(
                    "Matched entity: %s, confidence score: %.2f.%n",
                    match.getText(), match.getConfidenceScore()));
            }));
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String)}.
 */
public void extractKeyPhrases() {
    String input = "Bonjour tout le monde";
    System.out.println("Extracted phrases:");
    textAnalyticsAsyncClient.extractKeyPhrases(input).subscribe(phrase -> {
        System.out.printf("%s.%n", phrase);
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrases(String, String)}.
 */
public void extractKeyPhrasesWithLanguage() {
    String input = "Bonjour tout le monde";
    System.out.println("Extracted phrases:");
    // French document, so pass "fr" as the language hint.
    textAnalyticsAsyncClient.extractKeyPhrases(input, "fr").subscribe(phrase -> {
        System.out.printf("%s.%n", phrase);
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatch(Iterable, String,
 * TextAnalyticsRequestOptions)}.
 */
public void extractKeyPhrasesStringListWithOptions() {
    // Plain-string documents; the language hint applies to the whole batch.
    List<String> inputs = Arrays.asList(
        "Hello world. This is some input text that I love.",
        "Bonjour tout le monde");
    textAnalyticsAsyncClient.extractKeyPhrasesBatch(inputs, "en", null).subscribe(results -> {
        TextDocumentBatchStatistics stats = results.getStatistics();
        System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
            stats.getTransactionCount(), stats.getValidDocumentCount());
        results.forEach(result -> {
            System.out.println("Extracted phrases:");
            result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
        });
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#extractKeyPhrasesBatchWithResponse(Iterable,
 * TextAnalyticsRequestOptions)}.
 */
public void extractBatchKeyPhrasesMaxOverload() {
    // Pre-tagged documents sent through the with-response overload.
    List<TextDocumentInput> batchDocuments = Arrays.asList(
        new TextDocumentInput("0", "I had a wonderful trip to Seattle last week.").setLanguage("en"),
        new TextDocumentInput("1", "I work at Microsoft.").setLanguage("en"));
    // Request batch-level statistics together with the key phrases.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(batchDocuments, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            ExtractKeyPhrasesResultCollection collection = response.getValue();
            TextDocumentBatchStatistics stats = collection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(), stats.getValidDocumentCount());
            collection.forEach(result -> {
                System.out.println("Extracted phrases:");
                result.getKeyPhrases().forEach(phrase -> System.out.printf("%s.%n", phrase));
            });
        });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String)}.
 */
public void analyzeSentiment() {
    String input = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(input).subscribe(documentSentiment -> {
        System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
        // Per-sentence breakdown with the three confidence scores.
        documentSentiment.getSentences().forEach(sentence -> System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                + "negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentiment(String, String)}.
 */
public void analyzeSentimentWithLanguage() {
    String input = "The hotel was dark and unclean.";
    textAnalyticsAsyncClient.analyzeSentiment(input, "en").subscribe(documentSentiment -> {
        System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment());
        // Per-sentence breakdown with the three confidence scores.
        documentSentiment.getSentences().forEach(sentence -> System.out.printf(
            "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, "
                + "negative score: %.2f.%n",
            sentence.getSentiment(),
            sentence.getConfidenceScores().getPositive(),
            sentence.getConfidenceScores().getNeutral(),
            sentence.getConfidenceScores().getNegative()));
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String,
 * TextAnalyticsRequestOptions)}.
 */
public void analyzeSentimentStringListWithOptions() {
    // Plain-string documents; the language hint applies to the whole batch.
    List<String> inputs = Arrays.asList(
        "The hotel was dark and unclean.",
        "The restaurant had amazing gnocchi."
    );
    textAnalyticsAsyncClient.analyzeSentimentBatch(inputs, "en", null).subscribe(results -> {
        TextDocumentBatchStatistics stats = results.getStatistics();
        System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
            stats.getTransactionCount(), stats.getValidDocumentCount());
        results.forEach(result -> {
            System.out.printf("Document ID: %s%n", result.getId());
            DocumentSentiment documentSentiment = result.getDocumentSentiment();
            System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
            documentSentiment.getSentences().forEach(sentence -> System.out.printf(
                "Recognized sentence sentiment: %s, positive score: %.2f, "
                    + "neutral score: %.2f, negative score: %.2f.%n",
                sentence.getSentiment(),
                sentence.getConfidenceScores().getPositive(),
                sentence.getConfidenceScores().getNeutral(),
                sentence.getConfidenceScores().getNegative()));
        });
    });
}
/**
 * Code snippet for {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable,
 * TextAnalyticsRequestOptions)}.
 */
public void analyzeBatchSentimentMaxOverload() {
    // Pre-tagged documents sent through the with-response overload.
    List<TextDocumentInput> batchDocuments = Arrays.asList(
        new TextDocumentInput("0", "The hotel was dark and unclean.").setLanguage("en"),
        new TextDocumentInput("1", "The restaurant had amazing gnocchi.").setLanguage("en"));
    // Request batch-level statistics together with the sentiment results.
    TextAnalyticsRequestOptions options = new TextAnalyticsRequestOptions().setIncludeStatistics(true);
    textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(batchDocuments, options)
        .subscribe(response -> {
            System.out.printf("Status code of request response: %d%n", response.getStatusCode());
            AnalyzeSentimentResultCollection collection = response.getValue();
            TextDocumentBatchStatistics stats = collection.getStatistics();
            System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n",
                stats.getTransactionCount(),
                stats.getValidDocumentCount());
            collection.forEach(result -> {
                System.out.printf("Document ID: %s%n", result.getId());
                DocumentSentiment documentSentiment = result.getDocumentSentiment();
                System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment());
                for (SentenceSentiment sentence : documentSentiment.getSentences()) {
                    System.out.printf(
                        "Recognized sentence sentiment: %s, positive score: %.2f, "
                            + "neutral score: %.2f, negative score: %.2f.%n",
                        sentence.getSentiment(),
                        sentence.getConfidenceScores().getPositive(),
                        sentence.getConfidenceScores().getNeutral(),
                        sentence.getConfidenceScores().getNegative());
                }
            });
        });
}
} |
It checks for an atomic single document. For an atomic single-document operation, the document can't be null but can be empty. For a batch operation, the document**s** input can be neither null nor an empty list. | public PiiEntityCollection recognizePiiEntities(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.recognizePiiEntities(document, language).block();
} | Objects.requireNonNull(document, "'document' cannot be null."); | public PiiEntityCollection recognizePiiEntities(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.recognizePiiEntities(document, language).block();
} | class TextAnalyticsClient {
private final TextAnalyticsAsyncClient client;
/**
* Create a {@code TextAnalyticsClient client} that sends requests to the Text Analytics service's endpoint.
* Each service call goes through the {@link TextAnalyticsClientBuilder
*
* @param client The {@link TextAnalyticsClient} that the client routes its request through.
*/
TextAnalyticsClient(TextAnalyticsAsyncClient client) {
    // Synchronous facade: every call in this class delegates to the async client and blocks.
    this.client = client;
}
/**
* Get default country hint code.
*
* @return The default country hint code
*/
public String getDefaultCountryHint() {
    // The async client holds the builder-configured default.
    return client.getDefaultCountryHint();
}
/**
* Get default language when the builder is setup.
*
* @return The default language
*/
public String getDefaultLanguage() {
    // The async client holds the builder-configured default.
    return client.getDefaultLanguage();
}
/**
* Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
* certainty that the identified language is true.
*
* This method will use the default country hint that sets up in
* {@link TextAnalyticsClientBuilder
* the country hint.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language of single document.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return The {@link DetectedLanguage detected language} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document) {
    // Re-uses the two-argument overload with the builder-configured country hint.
    return detectLanguage(document, client.getDefaultCountryHint());
}
/**
* Returns the detected language and a confidence score between zero and one.
* Scores close to one indicate 100% certainty that the identified language is true.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language of documents with a provided country hint.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguage
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
* specified. To remove this behavior you can reset this parameter by setting this value to empty string
* {@code countryHint} = "" or "none".
*
* @return The {@link DetectedLanguage detected language} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document, String countryHint) {
    // NOTE(review): unlike recognizeEntities(String, String), there is no explicit null check
    // here — presumably the async client validates the document; confirm.
    return client.detectLanguage(document, countryHint).block();
}
/**
* Detects Language for a batch of document with the provided country hint and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the language in a list of documents with a provided country hint and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch
*
* @param documents The list of documents to detect languages for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
* specified. To remove this behavior you can reset this parameter by setting this value to empty string
* {@code countryHint} = "" or "none".
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link DetectLanguageResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectLanguageResultCollection detectLanguageBatch(
    Iterable<String> documents, String countryHint, TextAnalyticsRequestOptions options) {
    // Reject null/empty batches before issuing the request (see documented @throws).
    inputDocumentsValidation(documents);
    return client.detectLanguageBatch(documents, countryHint, options).block();
}
/**
* Detects Language for a batch of {@link DetectLanguageInput document} with provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Detects the languages with http response in a list of {@link DetectLanguageInput document} with provided
* request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.detectLanguageBatch
*
* @param documents The list of {@link DetectLanguageInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link DetectLanguageResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DetectLanguageResultCollection> detectLanguageBatchWithResponse(
    Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Reject null/empty batches before issuing the request (see documented @throws).
    inputDocumentsValidation(documents);
    // Calls the inner async sub-client directly so the caller-supplied Context reaches the pipeline.
    return client.detectLanguageAsyncClient.detectLanguageBatchWithContext(documents, options, context).block();
}
/**
* Returns a list of general categorized entities in the provided document.
*
* For a list of supported entity types, check: <a href="https:
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the entities of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities
*
* @param document The document to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link CategorizedEntityCollection} contains a list of
* {@link CategorizedEntity recognized categorized entities} and warnings.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document) {
    // Re-uses the two-argument overload with the builder-configured default language.
    return recognizeEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of general categorized entities in the provided document with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities in a document with a provided language code.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntities
*
* @param document The document to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link CategorizedEntityCollection} contains a list of
* {@link CategorizedEntity recognized categorized entities} and warnings.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document, String language) {
    // Single-document overload: the document must be non-null (an empty string is allowed).
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.recognizeEntities(document, language).block();
}
/**
* Returns a list of general categorized entities for the provided list of documents with provided language code
* and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities in a list of documents with a provided language code and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeCategorizedEntitiesBatch
*
* @param documents A list of documents to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link RecognizeEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeEntitiesResultCollection recognizeEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty batches before issuing the request (see documented @throws).
    inputDocumentsValidation(documents);
    return client.recognizeEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of general categorized entities for the provided list of {@link TextDocumentInput document} with
* provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the entities with http response in a list of {@link TextDocumentInput document} with provided
* request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Reject null/empty batches before issuing the request (see documented @throws).
    inputDocumentsValidation(documents);
    // Calls the inner async sub-client directly so the caller-supplied Context reaches the pipeline.
    return client.recognizeEntityAsyncClient.recognizeEntitiesBatchWithContext(documents, options, context).block();
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
* default language that sets up in {@link TextAnalyticsClientBuilder
* specified, service will use 'en' as the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the PII entities details in a document.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public PiiEntityCollection recognizePiiEntities(String document) {
    // Re-uses the two-argument overload with the builder-configured default language.
    return recognizePiiEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document
* with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a document with a provided language code.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of documents with
* provided language code and request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a list of documents with a provided language code
* and request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch
*
* @param documents A list of documents to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizePiiEntitiesResultCollection recognizePiiEntitiesBatch(
    Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Fail fast on a null or empty batch, matching every other *Batch overload in this class
    // and the documented @throws NullPointerException/IllegalArgumentException contract.
    inputDocumentsValidation(documents);
    return client.recognizePiiEntitiesBatch(documents, language, options).block();
}
/**
* Returns a list of Personally Identifiable Information(PII) entities for the provided list of
* {@link TextDocumentInput document} with provided request options.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details with http response in a list of {@link TextDocumentInput document}
* with provided request options.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize PII entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizePiiEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is null.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizePiiEntitiesResultCollection> recognizePiiEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Fail fast on a null or empty batch, matching the other *BatchWithResponse overloads and
    // the documented @throws NullPointerException/IllegalArgumentException contract.
    inputDocumentsValidation(documents);
    // Calls the inner async sub-client directly so the caller-supplied Context reaches the pipeline.
    return client.recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithContext(documents, options,
        context).block();
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the provided document.
* See <a href="https:
*
* This method will use the default language that sets up in
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Recognize the linked entities of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities
*
* @param document The document to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document) {
    // Re-uses the two-argument overload with the builder-configured default language.
    return recognizeLinkedEntities(document, client.getDefaultLanguage());
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the provided document with
* language code.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the linked entities in a document with a provided language code.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntities
*
* @param document The document to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link LinkedEntityCollection} contains a list of {@link LinkedEntity recognized linked entities}.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document, String language) {
    // Single-document overload: the document must be non-null (an empty string is allowed).
    Objects.requireNonNull(document, "'document' cannot be null.");
    return client.recognizeLinkedEntities(document, language).block();
}
/**
 * Returns recognized entities, with links to a well-known knowledge base, for a batch of documents
 * with the provided language code and request options.
 *
 * @param documents A list of documents to recognize linked entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link RecognizeLinkedEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeLinkedEntitiesResultCollection recognizeLinkedEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final RecognizeLinkedEntitiesResultCollection resultCollection =
        client.recognizeLinkedEntitiesBatch(documents, language, options).block();
    return resultCollection;
}
/**
 * Returns recognized entities, with links to a well-known knowledge base, for a batch of
 * {@link TextDocumentInput documents} with request options, exposing the raw HTTP response.
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection> recognizeLinkedEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Route through the async sub-client so the pipeline context is propagated, then block.
    final Response<RecognizeLinkedEntitiesResultCollection> response = client
        .recognizeLinkedEntityAsyncClient
        .recognizeLinkedEntitiesBatchWithContext(documents, options, context)
        .block();
    return response;
}
/**
 * Returns a list of strings denoting the key phrases in the document. The default language
 * configured through the builder is used; if none was configured, the service uses "en" (English).
 *
 * @param document The document to be analyzed.
 * @return A {@link KeyPhrasesCollection} containing the extracted key phrases.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document) {
    // Delegate to the language-aware overload with the builder-configured default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return extractKeyPhrases(document, defaultLanguage);
}
/**
 * Returns a list of strings denoting the key phrases in the document, analyzed with the provided
 * language code.
 *
 * @param document The document to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set,
 * uses "en" for English as default.
 * @return A {@link KeyPhrasesCollection} containing the extracted key phrases.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Block on the async client's reactive result to provide a synchronous API.
    final KeyPhrasesCollection keyPhrases = client.extractKeyPhrases(document, language).block();
    return keyPhrases;
}
/**
 * Returns the key phrases for a batch of documents with the provided language code and request
 * options.
 *
 * @param documents A list of documents to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link ExtractKeyPhrasesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public ExtractKeyPhrasesResultCollection extractKeyPhrasesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final ExtractKeyPhrasesResultCollection resultCollection =
        client.extractKeyPhrasesBatch(documents, language, options).block();
    return resultCollection;
}
/**
 * Returns the key phrases for a batch of {@link TextDocumentInput documents} with request options,
 * exposing the raw HTTP response.
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link ExtractKeyPhrasesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtractKeyPhrasesResultCollection> extractKeyPhrasesBatchWithResponse(
        Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Route through the async sub-client so the pipeline context is propagated, then block.
    final Response<ExtractKeyPhrasesResultCollection> response = client
        .extractKeyPhraseAsyncClient
        .extractKeyPhrasesBatchWithContext(documents, options, context)
        .block();
    return response;
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral), for the document and each sentence within it. The default
 * language configured through the builder is used; if none was configured, the service uses "en".
 *
 * @param document The document to be analyzed.
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document) {
    // Delegate to the language-aware overload with the builder-configured default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return analyzeSentiment(document, defaultLanguage);
}
/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral), for the document and each sentence within it, analyzed with
 * the provided language code.
 *
 * @param document The document to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set,
 * uses "en" for English as default.
 * @return A {@link DocumentSentiment analyzed document sentiment} of the document.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Block on the async client's reactive result to provide a synchronous API.
    final DocumentSentiment sentiment = client.analyzeSentiment(document, language).block();
    return sentiment;
}
/**
 * Returns sentiment predictions, with confidence scores for each sentiment label (Positive,
 * Negative, and Neutral), for a batch of documents with the provided language code and request
 * options.
 *
 * @param documents A list of documents to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link AnalyzeSentimentResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final AnalyzeSentimentResultCollection resultCollection =
        client.analyzeSentimentBatch(documents, language, options).block();
    return resultCollection;
}
/**
 * Returns sentiment predictions, with confidence scores for each sentiment label (Positive,
 * Negative, and Neutral), for a batch of {@link TextDocumentInput documents} with request options,
 * exposing the raw HTTP response.
 *
 * @param documents A list of {@link TextDocumentInput documents} to be analyzed.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
        Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Route through the async sub-client so the pipeline context is propagated, then block.
    final Response<AnalyzeSentimentResultCollection> response = client
        .analyzeSentimentAsyncClient
        .analyzeSentimentBatchWithContext(documents, options, context)
        .block();
    return response;
}
} | class TextAnalyticsClient {
// The async client this synchronous client delegates every service call to.
private final TextAnalyticsAsyncClient client;

/**
 * Creates a {@code TextAnalyticsClient} that sends requests to the Text Analytics service's
 * endpoint by delegating to the given asynchronous client.
 *
 * @param client The {@link TextAnalyticsAsyncClient} that this client routes its requests through.
 */
TextAnalyticsClient(TextAnalyticsAsyncClient client) {
    this.client = client;
}
/**
 * Gets the default country hint code configured when the client was built.
 *
 * @return The default country hint code.
 */
public String getDefaultCountryHint() {
    final String countryHint = client.getDefaultCountryHint();
    return countryHint;
}
/**
 * Gets the default language configured when the client was built.
 *
 * @return The default language.
 */
public String getDefaultLanguage() {
    final String language = client.getDefaultLanguage();
    return language;
}
/**
 * Returns the detected language and a confidence score between zero and one. Scores close to one
 * indicate 100% certainty that the identified language is true. The default country hint
 * configured through the builder is used.
 *
 * @param document The document to be analyzed.
 * @return The {@link DetectedLanguage detected language} of the document.
 * @throws NullPointerException if {@code document} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document) {
    // Delegate to the country-hint-aware overload with the builder-configured default.
    final String defaultCountryHint = client.getDefaultCountryHint();
    return detectLanguage(document, defaultCountryHint);
}
/**
 * Returns the detected language and a confidence score between zero and one.
 * Scores close to one indicate 100% certainty that the identified language is true.
 *
 * @param document The document to be analyzed.
 * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to
 * "US" if not specified. To remove this behavior you can reset this parameter by setting this
 * value to empty string {@code countryHint} = "" or "none".
 * @return The {@link DetectedLanguage detected language} of the document.
 * @throws NullPointerException if {@code document} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectedLanguage detectLanguage(String document, String countryHint) {
    // Fail fast with the documented NullPointerException; every other single-document method in
    // this class (recognizeEntities, extractKeyPhrases, analyzeSentiment) validates the same way.
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Block on the async client's reactive result to provide a synchronous API.
    return client.detectLanguage(document, countryHint).block();
}
/**
 * Detects the language for a batch of documents with the provided country hint and request
 * options.
 *
 * @param documents The list of documents to detect languages for.
 * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to
 * "US" if not specified. Pass "" or "none" to remove this behavior.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link DetectLanguageResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public DetectLanguageResultCollection detectLanguageBatch(
        Iterable<String> documents, String countryHint, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final DetectLanguageResultCollection resultCollection =
        client.detectLanguageBatch(documents, countryHint, options).block();
    return resultCollection;
}
/**
 * Detects the language for a batch of {@link DetectLanguageInput documents} with the provided
 * request options, exposing the raw HTTP response.
 *
 * @param documents The list of {@link DetectLanguageInput documents} to be analyzed.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link DetectLanguageResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<DetectLanguageResultCollection> detectLanguageBatchWithResponse(
        Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Route through the async sub-client so the pipeline context is propagated, then block.
    final Response<DetectLanguageResultCollection> response = client
        .detectLanguageAsyncClient
        .detectLanguageBatchWithContext(documents, options, context)
        .block();
    return response;
}
/**
 * Returns a list of general categorized entities in the provided document. The default language
 * configured through the builder is used; if none was configured, the service uses "en" (English).
 *
 * @param document The document to recognize entities for.
 * @return A {@link CategorizedEntityCollection} containing the
 * {@link CategorizedEntity recognized categorized entities} and warnings.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document) {
    // Delegate to the language-aware overload with the builder-configured default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return recognizeEntities(document, defaultLanguage);
}
/**
 * Returns a list of general categorized entities in the provided document, analyzed with the
 * provided language code.
 *
 * @param document The document to recognize entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for
 * English as default.
 * @return The {@link CategorizedEntityCollection} containing the
 * {@link CategorizedEntity recognized categorized entities} and warnings.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public CategorizedEntityCollection recognizeEntities(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Block on the async client's reactive result to provide a synchronous API.
    final CategorizedEntityCollection entityCollection =
        client.recognizeEntities(document, language).block();
    return entityCollection;
}
/**
 * Returns general categorized entities for a batch of documents with the provided language code
 * and request options.
 *
 * @param documents A list of documents to recognize entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link RecognizeEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeEntitiesResultCollection recognizeEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final RecognizeEntitiesResultCollection resultCollection =
        client.recognizeEntitiesBatch(documents, language, options).block();
    return resultCollection;
}
/**
 * Returns general categorized entities for a batch of {@link TextDocumentInput documents} with
 * the provided request options, exposing the raw HTTP response.
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link RecognizeEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeEntitiesResultCollection> recognizeEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    inputDocumentsValidation(documents);
    // Route through the async sub-client so the pipeline context is propagated, then block.
    final Response<RecognizeEntitiesResultCollection> response = client
        .recognizeEntityAsyncClient
        .recognizeEntitiesBatchWithContext(documents, options, context)
        .block();
    return response;
}
/**
 * Returns a list of Personally Identifiable Information (PII) entities in the provided document.
 * The default language configured through the builder is used; if none was configured, the
 * service uses "en" (English).
 *
 * @param document The document to recognize PII entities details for.
 * @return A {@link PiiEntityCollection recognized PII entities collection}.
 * @throws NullPointerException if {@code document} is null.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public PiiEntityCollection recognizePiiEntities(String document) {
    // Delegate to the language-aware overload with the builder-configured default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return recognizePiiEntities(document, defaultLanguage);
}
/**
* Returns a list of Personally Identifiable Information(PII) entities in the provided document
* with provided language code.
*
* For a list of supported entity types, check: <a href="https:
* For a list of enabled languages, check: <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the PII entities details in a document with a provided language code.</p>
*
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizePiiEntities
*
* @param document The document to recognize PII entities details for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as default.
*
* @return The {@link PiiEntityCollection recognized PII entities collection}.
*
* @throws NullPointerException if {@code document} is null.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
/**
 * Returns Personally Identifiable Information (PII) entities for a batch of documents with the
 * provided language code and request options.
 *
 * @param documents A list of documents to recognize PII entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link RecognizePiiEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizePiiEntitiesResultCollection recognizePiiEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Validate eagerly so the documented NullPointerException/IllegalArgumentException are
    // thrown up front, consistent with every other *Batch method in this class (previously this
    // method skipped validation).
    inputDocumentsValidation(documents);
    return client.recognizePiiEntitiesBatch(documents, language, options).block();
}
/**
 * Returns Personally Identifiable Information (PII) entities for a batch of
 * {@link TextDocumentInput documents} with the provided request options, exposing the raw HTTP
 * response.
 *
 * @param documents A list of {@link TextDocumentInput documents} to recognize PII entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @param context Additional context that is passed through the Http pipeline during the service call.
 * @return A {@link Response} that contains a {@link RecognizePiiEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is null.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizePiiEntitiesResultCollection> recognizePiiEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
    // Validate eagerly so the documented NullPointerException/IllegalArgumentException are
    // thrown up front, consistent with every other *BatchWithResponse method in this class
    // (previously this method skipped validation).
    inputDocumentsValidation(documents);
    return client.recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithContext(documents, options,
        context).block();
}
/**
 * Returns a list of recognized entities, with links to a well-known knowledge base, for the
 * provided document. The default language configured through the builder is used; if none was
 * configured, the service uses "en" (English).
 *
 * @param document The document to recognize linked entities for.
 * @return A {@link LinkedEntityCollection} containing the {@link LinkedEntity recognized linked entities}.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document) {
    // Delegate to the language-aware overload with the builder-configured default language.
    final String defaultLanguage = client.getDefaultLanguage();
    return recognizeLinkedEntities(document, defaultLanguage);
}
/**
 * Returns a list of recognized entities, with links to a well-known knowledge base, for the
 * provided document in the given language.
 *
 * @param document The document to recognize linked entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the document. If not set,
 * uses "en" for English as default.
 * @return A {@link LinkedEntityCollection} containing the {@link LinkedEntity recognized linked entities}.
 * @throws NullPointerException if {@code document} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public LinkedEntityCollection recognizeLinkedEntities(String document, String language) {
    Objects.requireNonNull(document, "'document' cannot be null.");
    // Block on the async client's reactive result to provide a synchronous API.
    final LinkedEntityCollection entityCollection =
        client.recognizeLinkedEntities(document, language).block();
    return entityCollection;
}
/**
 * Returns recognized entities, with links to a well-known knowledge base, for a batch of documents
 * with the provided language code and request options.
 *
 * @param documents A list of documents to recognize linked entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the documents. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 * @return A {@link RecognizeLinkedEntitiesResultCollection}.
 * @throws NullPointerException if {@code documents} is {@code null}.
 * @throws IllegalArgumentException if {@code documents} is empty.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public RecognizeLinkedEntitiesResultCollection recognizeLinkedEntitiesBatch(
        Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
    // Reject null/empty input eagerly before dispatching the request.
    inputDocumentsValidation(documents);
    final RecognizeLinkedEntitiesResultCollection resultCollection =
        client.recognizeLinkedEntitiesBatch(documents, language, options).block();
    return resultCollection;
}
/**
* Returns a list of recognized entities with links to a well-known knowledge base for the list of
* {@link TextDocumentInput document} and request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Recognizes the linked entities with http response in a list of {@link TextDocumentInput} with request options.
* </p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.recognizeLinkedEntitiesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link RecognizeLinkedEntitiesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<RecognizeLinkedEntitiesResultCollection>
recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> documents,
TextAnalyticsRequestOptions options, Context context) {
inputDocumentsValidation(documents);
return client.recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithContext(
documents, options, context).block();
}
/**
* Returns a list of strings denoting the key phrases in the document.
*
* This method will use the default language that can be set by using method
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document) {
return extractKeyPhrases(document, client.getDefaultLanguage());
}
/**
* Returns a list of strings denoting the key phrases in the document.
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases in a document with a provided language representation.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrases
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link KeyPhrasesCollection} contains a list of extracted key phrases.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public KeyPhrasesCollection extractKeyPhrases(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.extractKeyPhrases(document, language).block();
}
/**
* Returns a list of strings denoting the key phrases in the documents with provided language code and
* request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases in a list of documents with a provided language code and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch
*
* @param documents A list of documents to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
* English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link ExtractKeyPhrasesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public ExtractKeyPhrasesResultCollection extractKeyPhrasesBatch(
Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
inputDocumentsValidation(documents);
return client.extractKeyPhrasesBatch(documents, language, options).block();
}
/**
 * Returns a list of strings denoting the key phrases in a batch of {@link TextDocumentInput document} with
* request options.
*
* See <a href="https:
*
* <p><strong>Code Sample</strong></p>
* <p>Extracts key phrases with http response in a list of {@link TextDocumentInput} with request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.extractKeyPhrasesBatch
*
* @param documents A list of {@link TextDocumentInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link ExtractKeyPhrasesResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtractKeyPhrasesResultCollection> extractKeyPhrasesBatchWithResponse(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
inputDocumentsValidation(documents);
return client.extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithContext(documents, options, context)
.block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* This method will use the default language that can be set by using method
* {@link TextAnalyticsClientBuilder
* the language.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments of documents</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
*
* @return A {@link DocumentSentiment analyzed document sentiment} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document) {
return analyzeSentiment(document, client.getDefaultLanguage());
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments in a document with a provided language representation.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentiment
*
* @param document The document to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the document. If not set, uses "en" for
* English as default.
*
* @return A {@link DocumentSentiment analyzed document sentiment} of the document.
*
* @throws NullPointerException if {@code document} is {@code null}.
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public DocumentSentiment analyzeSentiment(String document, String language) {
Objects.requireNonNull(document, "'document' cannot be null.");
return client.analyzeSentiment(document, language).block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments in a list of documents with a provided language representation and request options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch
*
* @param documents A list of documents to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param language The 2 letter ISO 639-1 representation of language for the documents. If not set, uses "en" for
* English as default.
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
*
* @return A {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public AnalyzeSentimentResultCollection analyzeSentimentBatch(
Iterable<String> documents, String language, TextAnalyticsRequestOptions options) {
inputDocumentsValidation(documents);
return client.analyzeSentimentBatch(documents, language, options).block();
}
/**
* Returns a sentiment prediction, as well as confidence scores for each sentiment label
* (Positive, Negative, and Neutral) for the document and each sentence within it.
*
* <p><strong>Code Sample</strong></p>
* <p>Analyze the sentiments with http response in a list of {@link TextDocumentInput documents} with request
* options.</p>
* {@codesnippet com.azure.ai.textanalytics.TextAnalyticsClient.analyzeSentimentBatch
*
* @param documents A list of {@link TextDocumentInput documents} to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see
* <a href="https:
* @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
* and show statistics.
* @param context Additional context that is passed through the Http pipeline during the service call.
*
* @return A {@link Response} that contains a {@link AnalyzeSentimentResultCollection}.
*
* @throws NullPointerException if {@code documents} is {@code null}.
* @throws IllegalArgumentException if {@code documents} is empty.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AnalyzeSentimentResultCollection> analyzeSentimentBatchWithResponse(
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
inputDocumentsValidation(documents);
return client.analyzeSentimentAsyncClient.analyzeSentimentBatchWithContext(documents, options, context).block();
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.